
winch: Enable stack overflow checking in trampolines (#7979)

* Check for stack overflow in trampolines

* More consistent use of `float_bytes` in the x64 masm
Trevor Elliott committed 95b37db7c4 via GitHub (branch pull/7981/head)
Changed files (lines changed in parentheses):

1. winch/codegen/src/codegen/mod.rs (2)
2. winch/codegen/src/isa/aarch64/masm.rs (2)
3. winch/codegen/src/isa/x64/masm.rs (11)
4. winch/codegen/src/masm.rs (2)
5. winch/codegen/src/trampoline.rs (19)
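
The core of the change is that `check_stack` now receives the vmctx register explicitly, so trampolines can emit the same overflow check as regular function prologues. Conceptually, the emitted check loads the runtime-limits pointer out of the vmctx, reads the stack limit, and traps if the stack pointer has dropped below it. A minimal sketch in plain Rust, not the actual Winch masm lowering; the struct and field names are illustrative only:

    struct VmRuntimeLimits {
        // The stack pointer value below which execution must trap.
        stack_limit: usize,
    }

    struct VmContext {
        runtime_limits: *const VmRuntimeLimits,
    }

    // Trap if the current stack pointer has crossed the configured limit. The
    // generated code performs the equivalent of this comparison at the start
    // of the prologue, and with this change trampolines emit it as well.
    unsafe fn check_stack(vmctx: *const VmContext, current_sp: usize) {
        let limits = (*vmctx).runtime_limits;
        if current_sp < (*limits).stack_limit {
            // In generated code this is a trap instruction, not a panic.
            panic!("stack overflow");
        }
    }

    fn main() {
        let limits = VmRuntimeLimits { stack_limit: 0x1000 };
        let vmctx = VmContext { runtime_limits: &limits };
        // A stack pointer comfortably above the limit passes the check.
        unsafe { check_stack(&vmctx, 0x8000) };
    }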

winch/codegen/src/codegen/mod.rs (2)

@@ -94,7 +94,7 @@ where
         // Stack overflow checks must occur during the function prologue to ensure that unwinding
         // will not assume they're user-handlable exceptions. As the `save_clobbers` call below
         // marks the end of the prologue for unwinding annotations, we make the stack check here.
-        self.masm.check_stack();
+        self.masm.check_stack(vmctx!(M));
         // We don't have any callee save registers in the winch calling convention, but
         // `save_clobbers` does some useful work for setting up unwinding state, and marks the end

winch/codegen/src/isa/aarch64/masm.rs (2)

@@ -49,7 +49,7 @@ impl Masm for MacroAssembler {
         self.move_sp_to_shadow_sp();
     }

-    fn check_stack(&mut self) {
+    fn check_stack(&mut self, _vmctx: Reg) {
         // TODO: Implement when we have more complete assembler support.
     }

winch/codegen/src/isa/x64/masm.rs (11)

@@ -77,12 +77,12 @@ impl Masm for MacroAssembler {
             .mov_rr(stack_pointer, frame_pointer, OperandSize::S64);
     }

-    fn check_stack(&mut self) {
+    fn check_stack(&mut self, vmctx: Reg) {
         let ptr_size: u8 = self.ptr_size.bytes().try_into().unwrap();
         let scratch = regs::scratch();
         self.load_ptr(
-            self.address_at_vmctx(ptr_size.vmcontext_runtime_limits().into()),
+            self.address_at_reg(vmctx, ptr_size.vmcontext_runtime_limits().into()),
             scratch,
         );

@@ -110,7 +110,7 @@ impl Masm for MacroAssembler {
                 RegClass::Float => align_to(total, float_bytes) + float_bytes,
                 RegClass::Vector => unimplemented!(),
             }),
-            16,
+            float_bytes,
         );
         // Emit unwind info.

@@ -148,6 +148,9 @@ impl Masm for MacroAssembler {
     }

     fn restore_clobbers(&mut self, clobbers: &[(Reg, OperandSize)]) {
+        let int_bytes: u32 = Self::ABI::word_bytes().try_into().unwrap();
+        let float_bytes = int_bytes * 2;
         let mut off = 0;
         for &(reg, size) in clobbers {
             // Align the current offset

@@ -161,7 +164,7 @@ impl Masm for MacroAssembler {
             off += size.bytes();
         }
-        self.free_stack(align_to(off, 16));
+        self.free_stack(align_to(off, float_bytes));
     }

     fn push(&mut self, reg: Reg, size: OperandSize) -> StackSlot {
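
The second half of the change replaces the hard-coded 16-byte alignment in the clobber save/restore paths with `float_bytes` (two machine words, so still 16 on x64), computed the same way in `save_clobbers` and `restore_clobbers`. A small sketch of what an `align_to`-style helper does, assuming the usual power-of-two rounding; this is illustrative, not Winch's exact implementation:

    // Round `value` up to the next multiple of `alignment` (a power of two).
    fn align_to(value: u32, alignment: u32) -> u32 {
        debug_assert!(alignment.is_power_of_two());
        (value + alignment - 1) & !(alignment - 1)
    }

    fn main() {
        // On a 64-bit target the ABI word is 8 bytes, so float_bytes is 16
        // and the result matches the previously hard-coded constant.
        let int_bytes: u32 = 8;
        let float_bytes = int_bytes * 2;
        assert_eq!(align_to(24, float_bytes), 32);
        assert_eq!(align_to(24, 16), 32);
        assert_eq!(align_to(0, float_bytes), 0);
    }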

winch/codegen/src/masm.rs (2)

@@ -488,7 +488,7 @@ pub(crate) trait MacroAssembler {
     }

     /// Emit a stack check.
-    fn check_stack(&mut self);
+    fn check_stack(&mut self, vmctx: Reg);

     /// Emit the function epilogue.
     fn epilogue(&mut self, locals_size: u32);

winch/codegen/src/trampoline.rs (19)

@@ -98,11 +98,12 @@ where
             .map(|operand| RegImm::reg(operand.unwrap_reg()))
             .ok_or_else(|| anyhow!("Expected value pointer to be in a register"))?;
-        self.prologue_with_callee_saved();
         // Assign the caller and caller VMContext arguments.
         let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&array_sig.params)?;
         let (dst_callee_vmctx, dst_caller_vmctx) = Self::callee_and_caller_vmctx(&wasm_sig.params)?;
+        self.prologue_with_callee_saved(caller_vmctx);
         self.masm
             .mov(vmctx.into(), dst_callee_vmctx, self.pointer_type.into());
         self.masm.mov(
@@ -194,9 +195,9 @@ where
     pub fn emit_native_to_wasm(mut self, ty: &WasmFuncType, callee_index: FuncIndex) -> Result<()> {
         let native_sig = native_sig::<M::ABI>(&ty, &self.call_conv);
         let wasm_sig = wasm_sig::<M::ABI>(&ty);
-        let (vmctx, _) = Self::callee_and_caller_vmctx(&native_sig.params)?;
+        let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&native_sig.params)?;
-        self.prologue_with_callee_saved();
+        self.prologue_with_callee_saved(caller_vmctx);
         let vmctx_runtime_limits_addr = self.vmctx_runtime_limits_addr(vmctx);
         let ret_area = self.make_ret_area(&wasm_sig);
@@ -363,7 +364,7 @@ where
         let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&wasm_sig.params).unwrap();
         let vmctx_runtime_limits_addr = self.vmctx_runtime_limits_addr(caller_vmctx);
-        self.prologue();
+        self.prologue(caller_vmctx);
         // Save the FP and return address when exiting Wasm.
         // TODO: Once Winch supports comparison operators,
@@ -620,17 +621,17 @@ where
     }

     /// The trampoline's prologue.
-    fn prologue(&mut self) {
+    fn prologue(&mut self, vmctx: Reg) {
         self.masm.prologue();
-        // TODO: emit a stack check
+        self.masm.check_stack(vmctx);
         self.masm.save_clobbers(&[]);
     }

     /// Similar to [Trampoline::prologue], but saves
     /// callee-saved registers.
-    fn prologue_with_callee_saved(&mut self) {
+    fn prologue_with_callee_saved(&mut self, vmctx: Reg) {
         self.masm.prologue();
-        // TODO: emit a stack check
+        self.masm.check_stack(vmctx);
         // Save any callee-saved registers.
         self.masm.save_clobbers(&self.callee_saved_regs);
     }
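
As the hunks above show, the trampolines obtain the caller's vmctx from the incoming ABI signature via `callee_and_caller_vmctx` and thread it into the prologue helpers, which now emit the stack check instead of carrying a TODO. A sketch of the convention the helper's name suggests, that the first two parameters of the signature are the callee and caller vmctx pointers; the types and helper below are illustrative stand-ins, not the actual Winch code:

    // Stand-in for an ABI operand: either a register name or a stack offset.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Param {
        Reg(&'static str),
        Stack(u32),
    }

    // Pull the callee and caller vmctx out of the first two signature parameters.
    fn callee_and_caller_vmctx(params: &[Param]) -> Result<(Param, Param), String> {
        match params {
            [callee, caller, ..] => Ok((*callee, *caller)),
            _ => Err("expected at least the callee and caller vmctx parameters".into()),
        }
    }

    fn main() {
        // With SysV-style register argument passing, the first two integer
        // arguments land in rdi and rsi.
        let params = [Param::Reg("rdi"), Param::Reg("rsi"), Param::Stack(0)];
        let (callee_vmctx, caller_vmctx) = callee_and_caller_vmctx(&params).unwrap();
        assert_eq!(callee_vmctx, Param::Reg("rdi"));
        // As in the hunks above, the trampoline passes the caller's vmctx to
        // the prologue helpers, which forward it to `check_stack`.
        assert_eq!(caller_vmctx, Param::Reg("rsi"));
    }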
