Browse Source

x64: Use r10 for the indirect tail call destination (#8407)

* Always use r10 for indirect return calls, and indirect winch calls

Use `r10` for the destination of indirect return calls, and indirect
calls using the winch calling convention, as it is a caller-saved
register.

For tail calls, this ensures that we won't accidentally pick a
callee-saved register for the destination, clobbering it when we restore
callee-saves in the call to `emit_return_call_common_sequence`.

For winch calls, using `r10` instead of `r15` means that it's still
possible to use the pinned register in combination with winch.

* Always use `r11` for the temp in return_call

* Switch ReturnCallUnknown to taking the callee as a Reg
pull/8416/head
Trevor Elliott 7 months ago
committed by GitHub
parent
commit
d6aeb1d702
No known key found for this signature in database GPG Key ID: B5690EEEBB952194
  1. 2
      cranelift/codegen/src/isa/x64/abi.rs
  2. 2
      cranelift/codegen/src/isa/x64/inst.isle
  3. 7
      cranelift/codegen/src/isa/x64/inst/emit.rs
  4. 20
      cranelift/codegen/src/isa/x64/inst/mod.rs
  5. 5
      cranelift/codegen/src/isa/x64/pcc.rs
  6. 34
      cranelift/filetests/filetests/isa/x64/return-call-indirect.clif
  7. 48
      cranelift/filetests/filetests/isa/x64/return-call.clif
  8. 13
      cranelift/filetests/filetests/isa/x64/tail-call-conv.clif
  9. 64
      cranelift/filetests/filetests/isa/x64/winch.clif

2
cranelift/codegen/src/isa/x64/abi.rs

@@ -1046,7 +1046,7 @@ impl X64CallSite {
distance: RelocDistance::Far,
});
ctx.emit(Inst::ReturnCallUnknown {
-callee: tmp2.to_writable_reg().into(),
+callee: tmp2.to_reg().to_reg(),
info,
});
}

2
cranelift/codegen/src/isa/x64/inst.isle

@@ -545,7 +545,7 @@
(info BoxReturnCallInfo))
;; Tail call to an indirect destination.
-(ReturnCallUnknown (callee RegMem)
+(ReturnCallUnknown (callee Reg)
(info BoxReturnCallInfo))
;; A pseudo-instruction that captures register arguments in vregs.

7
cranelift/codegen/src/isa/x64/inst/emit.rs

@@ -1647,11 +1647,14 @@ pub(crate) fn emit(
callee,
info: call_info,
} => {
-let callee = callee.with_allocs(allocs);
+let callee = allocs.next(*callee);
emit_return_call_common_sequence(allocs, sink, info, state, &call_info);
-Inst::JmpUnknown { target: callee }.emit(&[], sink, info, state);
+Inst::JmpUnknown {
+    target: RegMem::reg(callee),
+}
+.emit(&[], sink, info, state);
sink.add_call_site(ir::Opcode::ReturnCallIndirect);
}

20
cranelift/codegen/src/isa/x64/inst/mod.rs

@@ -1697,7 +1697,7 @@ impl PrettyPrint for Inst {
new_stack_arg_size,
tmp,
} = &**info;
-let callee = callee.pretty_print(8, allocs);
+let callee = pretty_print_reg(*callee, 8, allocs);
let tmp = pretty_print_reg(tmp.to_reg().to_reg(), 8, allocs);
let mut s =
format!("return_call_unknown {callee} ({new_stack_arg_size}) tmp={tmp}");
@@ -2347,8 +2347,9 @@ fn x64_get_operands<F: Fn(VReg) -> VReg>(inst: &Inst, collector: &mut OperandCol
match dest {
RegMem::Reg { reg } if info.callee_conv == CallConv::Winch => {
// TODO(https://github.com/bytecodealliance/regalloc2/issues/145):
-// This shouldn't be a fixed register constraint.
-collector.reg_fixed_use(*reg, regs::r15())
+// This shouldn't be a fixed register constraint. r10 is caller-saved, so this
+// should be safe to use.
+collector.reg_fixed_use(*reg, regs::r10())
}
_ => dest.get_operands(collector),
}
@@ -2363,7 +2364,7 @@ fn x64_get_operands<F: Fn(VReg) -> VReg>(inst: &Inst, collector: &mut OperandCol
Inst::ReturnCallKnown { callee, info } => {
let ReturnCallInfo { uses, tmp, .. } = &**info;
-collector.reg_early_def(tmp.to_writable_reg());
+collector.reg_fixed_def(tmp.to_writable_reg(), regs::r11());
// Same as in the `Inst::CallKnown` branch.
debug_assert_ne!(*callee, ExternalName::LibCall(LibCall::Probestack));
for u in uses {
@@ -2373,8 +2374,15 @@ fn x64_get_operands<F: Fn(VReg) -> VReg>(inst: &Inst, collector: &mut OperandCol
Inst::ReturnCallUnknown { callee, info } => {
let ReturnCallInfo { uses, tmp, .. } = &**info;
-callee.get_operands(collector);
-collector.reg_early_def(tmp.to_writable_reg());
+// TODO(https://github.com/bytecodealliance/regalloc2/issues/145):
+// This shouldn't be a fixed register constraint, but it's not clear how to
+// pick a register that won't be clobbered by the callee-save restore code
+// emitted with a return_call_indirect. r10 is caller-saved, so this should be
+// safe to use.
+collector.reg_fixed_use(*callee, regs::r10());
+collector.reg_fixed_def(tmp.to_writable_reg(), regs::r11());
for u in uses {
collector.reg_fixed_use(u.vreg, u.preg);
}

5
cranelift/codegen/src/isa/x64/pcc.rs

@@ -838,10 +838,9 @@ pub(crate) fn check(
| Inst::Ud2 { .. } => Ok(()),
Inst::Rets { .. } => Ok(()),
+Inst::ReturnCallUnknown { .. } => Ok(()),
Inst::CallUnknown { ref dest, .. }
-| Inst::ReturnCallUnknown {
-    callee: ref dest, ..
-}
| Inst::JmpUnknown {
target: ref dest, ..
} => match <&RegMem>::from(dest) {

34
cranelift/filetests/filetests/isa/x64/return-call-indirect.clif

@ -42,18 +42,18 @@ block0(v0: i64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_i64+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %rdi=%rdi
; load_ext_name %callee_i64+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_i64 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_i64 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test colocated tail calls ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -70,18 +70,18 @@ block0(v0: i64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_i64+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %rdi=%rdi
; load_ext_name %callee_i64+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; leaq (%rip), %rcx ; reloc_external CallPCRel4 %callee_i64 -4
; leaq (%rip), %r10 ; reloc_external CallPCRel4 %callee_i64 -4
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test passing `f64`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -139,18 +139,18 @@ block0(v0: f64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_f64+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %xmm0=%xmm0
; load_ext_name %callee_f64+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %xmm0=%xmm0
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_f64 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_f64 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test passing `i8`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -195,18 +195,18 @@ block0(v0: i8):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_i8+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %rdi=%rdi
; load_ext_name %callee_i8+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_i8 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_i8 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test passing many arguments on stack ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -335,7 +335,7 @@ block0:
; movq rsp(72 + virtual offset), %r8
; movq rsp(64 + virtual offset), %r9
; movq rsp(0 + virtual offset), %r10
; return_call_unknown %r10 (160) tmp=%rax %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
; return_call_unknown %r10 (160) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
;
; Disassembled:
; block0: ; offset 0x0

48
cranelift/filetests/filetests/isa/x64/return-call.clif

@ -40,18 +40,18 @@ block0(v0: i64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_i64+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %rdi=%rdi
; load_ext_name %callee_i64+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_i64 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_i64 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test colocated tail calls ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -66,7 +66,7 @@ block0(v0: i64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; return_call_known TestCase(%callee_i64) (0) tmp=%rax %rdi=%rdi
; return_call_known TestCase(%callee_i64) (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
@ -131,18 +131,18 @@ block0(v0: f64):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_f64+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %xmm0=%xmm0
; load_ext_name %callee_f64+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %xmm0=%xmm0
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_f64 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_f64 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test passing `i8`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -185,18 +185,18 @@ block0(v0: i8):
; pushq %rbp
; movq %rsp, %rbp
; block0:
; load_ext_name %callee_i8+0, %rcx
; return_call_unknown %rcx (0) tmp=%rdx %rdi=%rdi
; load_ext_name %callee_i8+0, %r10
; return_call_unknown %r10 (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
; pushq %rbp
; movq %rsp, %rbp
; block1: ; offset 0x4
; movabsq $0, %rcx ; reloc_external Abs8 %callee_i8 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_i8 0
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rcx
; jmpq *%r10
;;;; Test passing fewer arguments on the stack ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -243,7 +243,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v
; movq rbp(stack args max - 24), %r9
; movq rbp(stack args max - 16), %rax
; movl %eax, rbp(stack args max - 16)
; return_call_known TestCase(%one_stack_arg) (16) tmp=%r10 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
; return_call_known TestCase(%one_stack_arg) (16) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
;
; Disassembled:
; block0: ; offset 0x0
@ -260,8 +260,8 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v
; movl %eax, 0x20(%rbp)
; movq %rbp, %rsp
; popq %rbp
; movq (%rsp), %r10
; movq %r10, 0x10(%rsp)
; movq (%rsp), %r11
; movq %r11, 0x10(%rsp)
; addq $0x10, %rsp
; jmp 0x35 ; reloc_external CallPCRel4 %one_stack_arg -4
@ -279,7 +279,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v
; movq rbp(stack args max - 32), %r10
; movq rbp(stack args max - 24), %rsi
; movq rbp(stack args max - 16), %rdi
; return_call_known TestCase(%callee_i8) (0) tmp=%rdx %rdi=%rdi
; return_call_known TestCase(%callee_i8) (0) tmp=%r11 %rdi=%rdi
;
; Disassembled:
; block0: ; offset 0x0
@ -291,8 +291,8 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v
; movq 0x20(%rbp), %rdi
; movq %rbp, %rsp
; popq %rbp
; movq (%rsp), %rdx
; movq %rdx, 0x20(%rsp)
; movq (%rsp), %r11
; movq %r11, 0x20(%rsp)
; addq $0x20, %rsp
; jmp 0x26 ; reloc_external CallPCRel4 %callee_i8 -4
@ -325,7 +325,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32):
; movl %esi, rbp(stack args max - 16)
; movq %rsi, %rdi
; movq %r10, %rsi
; return_call_known TestCase(%call_one_stack_arg) (32) tmp=%r10 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
; return_call_known TestCase(%call_one_stack_arg) (32) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
;
; Disassembled:
; block0: ; offset 0x0
@ -531,14 +531,14 @@ block0:
; movq %rsi, rbp(stack args max - 16)
; movq rsp(0 + virtual offset), %rsi
; movq %rsi, rbp(stack args max - 8)
; load_ext_name %tail_callee_stack_args+0, %rax
; load_ext_name %tail_callee_stack_args+0, %r10
; movq rsp(72 + virtual offset), %rcx
; movq rsp(80 + virtual offset), %rdx
; movq rsp(88 + virtual offset), %rsi
; movq rsp(96 + virtual offset), %rdi
; movq rsp(64 + virtual offset), %r8
; movq rsp(56 + virtual offset), %r9
; return_call_unknown %rax (160) tmp=%r10 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
; return_call_unknown %r10 (160) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9
;
; Disassembled:
; block0: ; offset 0x0
@ -623,7 +623,7 @@ block0:
; movq %rsi, 0xa0(%rbp)
; movq (%rsp), %rsi
; movq %rsi, 0xa8(%rbp)
; movabsq $0, %rax ; reloc_external Abs8 %tail_callee_stack_args 0
; movabsq $0, %r10 ; reloc_external Abs8 %tail_callee_stack_args 0
; movq 0x48(%rsp), %rcx
; movq 0x50(%rsp), %rdx
; movq 0x58(%rsp), %rsi
@ -638,5 +638,5 @@ block0:
; addq $0xa0, %rsp
; movq %rbp, %rsp
; popq %rbp
; jmpq *%rax
; jmpq *%r10

13
cranelift/filetests/filetests/isa/x64/tail-call-conv.clif

@ -895,7 +895,6 @@ block0:
; popq %rbp
; retq
;; Test that tail calls that shrink the argument area don't clobber the location
;; of an indirect jump
@ -916,8 +915,8 @@ block0(v0: f64, v1: f64, v2: i8, v3: i32, v4: i128, v5: i32, v6: i128, v7: i32,
; movq rbp(stack args max - 24), %rax
; movq rbp(stack args max - 16), %rdx
; movq rbp(stack args max - 8), %r9
; load_ext_name %callee_simple+0, %rsi
; return_call_unknown %rsi (0) tmp=%rdi
; load_ext_name %callee_simple+0, %r10
; return_call_unknown %r10 (0) tmp=%r11
;
; Disassembled:
; block0: ; offset 0x0
@ -928,11 +927,11 @@ block0(v0: f64, v1: f64, v2: i8, v3: i32, v4: i128, v5: i32, v6: i128, v7: i32,
; movq 0x18(%rbp), %rax
; movq 0x20(%rbp), %rdx
; movq 0x28(%rbp), %r9
; movabsq $0, %rsi ; reloc_external Abs8 %callee_simple 0
; movabsq $0, %r10 ; reloc_external Abs8 %callee_simple 0
; movq %rbp, %rsp
; popq %rbp
; movq (%rsp), %rdi
; movq %rdi, 0x20(%rsp)
; movq (%rsp), %r11
; movq %r11, 0x20(%rsp)
; addq $0x20, %rsp
; jmpq *%rsi
; jmpq *%r10

64
cranelift/filetests/filetests/isa/x64/winch.clif

@ -38,8 +38,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; subq %rsp, $16, %rsp
; block0:
; movq %rdi, rsp(0 + virtual offset)
; load_ext_name %g+0, %r15
; call *%r15
; load_ext_name %g+0, %r10
; call *%r10
; movq rsp(0 + virtual offset), %rax
; addq %rsp, $16, %rsp
; movq %rbp, %rsp
@ -53,8 +53,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; subq $0x10, %rsp
; block1: ; offset 0x8
; movq %rdi, (%rsp)
; movabsq $0, %r15 ; reloc_external Abs8 %g 0
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 %g 0
; callq *%r10
; movq (%rsp), %rax
; addq $0x10, %rsp
; movq %rbp, %rsp
@ -81,8 +81,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %r15, 48(%rsp)
; block0:
; movq %rdi, rsp(0 + virtual offset)
; load_ext_name %g+0, %r15
; call *%r15
; load_ext_name %g+0, %r10
; call *%r10
; movq rsp(0 + virtual offset), %rax
; movq 16(%rsp), %rbx
; movq 24(%rsp), %r12
@ -106,8 +106,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %r15, 0x30(%rsp)
; block1: ; offset 0x21
; movq %rdi, (%rsp)
; movabsq $0, %r15 ; reloc_external Abs8 %g 0
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 %g 0
; callq *%r10
; movq (%rsp), %rax
; movq 0x10(%rsp), %rbx
; movq 0x18(%rsp), %r12
@ -135,8 +135,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %rdi, %rax
; movq %r9, %rdi
; movq %rax, %r9
; load_ext_name %g+0, %r15
; call *%r15
; load_ext_name %g+0, %r10
; call *%r10
; movq %rbp, %rsp
; popq %rbp
; ret
@ -149,8 +149,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %rdi, %rax
; movq %r9, %rdi
; movq %rax, %r9
; movabsq $0, %r15 ; reloc_external Abs8 %g 0
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 %g 0
; callq *%r10
; movq %rbp, %rsp
; popq %rbp
; retq
@ -177,8 +177,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %rdi, %rax
; movq %r9, %rdi
; movq %rax, %r9
; load_ext_name %g+0, %r15
; call *%r15
; load_ext_name %g+0, %r10
; call *%r10
; movq 0(%rsp), %rbx
; movq 8(%rsp), %r12
; movq 16(%rsp), %r13
@ -203,8 +203,8 @@ block0(v0:i64, v1:i64, v2:i64, v3:i64, v4:i64, v5:i64):
; movq %rdi, %rax
; movq %r9, %rdi
; movq %rax, %r9
; movabsq $0, %r15 ; reloc_external Abs8 %g 0
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 %g 0
; callq *%r10
; movq (%rsp), %rbx
; movq 8(%rsp), %r12
; movq 0x10(%rsp), %r13
@ -236,12 +236,12 @@ block0:
; movq %r14, 40(%rsp)
; movq %r15, 48(%rsp)
; block0:
; load_ext_name userextname0+0, %r15
; movq %r15, rsp(0 + virtual offset)
; movq rsp(0 + virtual offset), %r15
; call *%r15
; movq rsp(0 + virtual offset), %r15
; call *%r15
; load_ext_name userextname0+0, %r10
; movq %r10, rsp(0 + virtual offset)
; movq rsp(0 + virtual offset), %r10
; call *%r10
; movq rsp(0 + virtual offset), %r10
; call *%r10
; movq 16(%rsp), %rbx
; movq 24(%rsp), %r12
; movq 32(%rsp), %r13
@ -263,12 +263,12 @@ block0:
; movq %r14, 0x28(%rsp)
; movq %r15, 0x30(%rsp)
; block1: ; offset 0x21
; movabsq $0, %r15 ; reloc_external Abs8 u2:0 0
; movq %r15, (%rsp)
; movq (%rsp), %r15
; callq *%r15
; movq (%rsp), %r15
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 u2:0 0
; movq %r10, (%rsp)
; movq (%rsp), %r10
; callq *%r10
; movq (%rsp), %r10
; callq *%r10
; movq 0x10(%rsp), %rbx
; movq 0x18(%rsp), %r12
; movq 0x20(%rsp), %r13
@ -301,8 +301,8 @@ block0(v0:i64):
; movq %r15, 48(%rsp)
; block0:
; lea 0(%rsp), %rdi
; load_ext_name %g+0, %r15
; call *%r15
; load_ext_name %g+0, %r10
; call *%r10
; movq 4(%rsp), %rax
; movq 0(%rsp), %r11
; andl %eax, %r11d, %eax
@ -328,8 +328,8 @@ block0(v0:i64):
; movq %r15, 0x30(%rsp)
; block1: ; offset 0x21
; leaq (%rsp), %rdi
; movabsq $0, %r15 ; reloc_external Abs8 %g 0
; callq *%r15
; movabsq $0, %r10 ; reloc_external Abs8 %g 0
; callq *%r10
; movq 4(%rsp), %rax
; movq (%rsp), %r11
; andl %r11d, %eax

Loading…
Cancel
Save