
PCC: check facts on loaded and stored values, infer facts where needed, and add a basic vmctx/memory example. (#7231)

* PCC: add checking of struct field types on loads and stores.

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>

* PCC: check that blockparams uphold facts appropriately.

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>

* PCC: infer facts based on types' value ranges; add a mock vmctx example.

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>

* Fix formatting.

* Add two fixes required by the new default facts (a condensed sketch follows this list):

- Allow any fact at all to subsume a trivially-true range fact that
  states a bitslice is within the maximum range for its width.

- Clamp the maximum value of a bitslice at the maximum value for the
  slice's width.
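
A condensed, hypothetical sketch of the two rules above in isolation. The `Fact` shape and helper names here are simplified stand-ins, not the actual `cranelift-codegen` items; the real changes appear in the `pcc.rs` diff below.

```rust
// Hypothetical sketch; names and the simplified `Fact` shape are illustrative.
enum Fact {
    /// The low `bit_width` bits of the value are at most `max`.
    ValueMax { bit_width: u16, max: u64 },
}

/// Largest value representable in `bits` bits (assumes `bits <= 64`).
fn max_value_for_width(bits: u16) -> u64 {
    assert!(bits <= 64);
    if bits == 64 {
        u64::MAX
    } else {
        (1u64 << bits) - 1
    }
}

/// Fix 1: any fact at all subsumes a trivially-true range fact, i.e. one
/// that merely restates the maximum representable value for its width.
fn rhs_is_trivially_true(rhs: &Fact) -> bool {
    match rhs {
        Fact::ValueMax { bit_width, max } => *max == max_value_for_width(*bit_width),
    }
}

/// Fix 2: when deriving a new range fact (e.g., for an add), clamp the
/// computed maximum at the largest value the slice's width can hold.
fn clamp_to_width(bit_width: u16, computed_max: u64) -> u64 {
    computed_max.min(max_value_for_width(bit_width))
}

fn main() {
    // The bottom 32 bits are always <= u32::MAX, so this fact is trivially true.
    let trivial = Fact::ValueMax { bit_width: 32, max: u32::MAX as u64 };
    assert!(rhs_is_trivially_true(&trivial));

    // A computed maximum that exceeds the 32-bit range is clamped back down.
    assert_eq!(clamp_to_width(32, u64::MAX), u32::MAX as u64);
}
```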

---------

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>
Chris Fallin 1 year ago
committed by GitHub
commit 2e7f9414bb
  1. cranelift/codegen/src/ir/memtype.rs (10)
  2. cranelift/codegen/src/ir/pcc.rs (193)
  3. cranelift/codegen/src/isa/aarch64/inst/args.rs (11)
  4. cranelift/codegen/src/isa/aarch64/lower.rs (10)
  5. cranelift/codegen/src/isa/aarch64/pcc.rs (203)
  6. cranelift/codegen/src/machinst/lower.rs (6)
  7. cranelift/codegen/src/machinst/vcode.rs (10)
  8. cranelift/filetests/filetests/pcc/fail/blockparams.clif (19)
  9. cranelift/filetests/filetests/pcc/fail/struct.clif (19)
  10. cranelift/filetests/filetests/pcc/fail/vmctx.clif (37)
  11. cranelift/filetests/filetests/pcc/succeed/blockparams.clif (19)
  12. cranelift/filetests/filetests/pcc/succeed/struct.clif (19)
  13. cranelift/filetests/filetests/pcc/succeed/vmctx.clif (20)

cranelift/codegen/src/ir/memtype.rs (10)

@ -159,6 +159,16 @@ pub struct MemoryTypeField {
pub readonly: bool,
}
impl MemoryTypeField {
/// Get the fact, if any, on a field. Fills in a default inferred
/// fact based on the type if no explicit fact is present.
pub fn fact(&self) -> Option<&Fact> {
self.fact
.as_ref()
.or_else(|| Fact::infer_from_type(self.ty))
}
}
impl MemoryTypeData {
/// Provide the static size of this type, if known.
///
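
A minimal, self-contained sketch of the "explicit fact, else inferred default" lookup that `MemoryTypeField::fact()` performs above. The `Fact`, `Ty`, and `Field` types here are simplified stand-ins for illustration only, under the assumption that integer-typed fields get a trivially-true range fact for their width and non-integer fields get none.

```rust
// Simplified stand-in types for illustration; not the real cranelift-codegen items.
#[derive(Debug, PartialEq)]
enum Fact {
    ValueMax { bit_width: u16, max: u64 },
}

#[derive(Clone, Copy)]
enum Ty {
    I32,
    F64,
}

/// Infer a minimal, always-true fact from a type: integers get a range
/// fact for their width, other types get nothing.
fn infer_from_type(ty: Ty) -> Option<&'static Fact> {
    static I32_FACT: Fact = Fact::ValueMax {
        bit_width: 32,
        max: u32::MAX as u64,
    };
    match ty {
        Ty::I32 => Some(&I32_FACT),
        Ty::F64 => None,
    }
}

struct Field {
    ty: Ty,
    fact: Option<Fact>,
}

impl Field {
    /// Prefer the explicit fact; otherwise fall back to the inferred default.
    fn fact(&self) -> Option<&Fact> {
        self.fact.as_ref().or_else(|| infer_from_type(self.ty))
    }
}

fn main() {
    // An unannotated i32 field still carries `max(32, 0xffff_ffff)`.
    let unannotated = Field { ty: Ty::I32, fact: None };
    assert_eq!(
        unannotated.fact(),
        Some(&Fact::ValueMax { bit_width: 32, max: u32::MAX as u64 })
    );

    // An explicit (tighter) fact takes precedence over the inferred one.
    let annotated = Field {
        ty: Ty::I32,
        fact: Some(Fact::ValueMax { bit_width: 32, max: 0x1000 }),
    };
    assert_eq!(
        annotated.fact(),
        Some(&Fact::ValueMax { bit_width: 32, max: 0x1000 })
    );

    // Non-integer fields get no default fact at all.
    assert_eq!(Field { ty: Ty::F64, fact: None }.fact(), None);
}
```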

cranelift/codegen/src/ir/pcc.rs (193)

@ -46,20 +46,40 @@
//! `FactContext::add()` and friends to forward-propagate facts.
//!
//! TODO:
//! - Check blockparams' preds against blockparams' facts.
//!
//! Completeness:
//! - Propagate facts through optimization (egraph layer).
//! - Generate facts in cranelift-wasm frontend when lowering memory ops.
//! - Implement richer "points-to" facts that describe the pointed-to
//! memory, so the loaded values can also have facts.
//! - Support bounds-checking-type operations for dynamic memories and
//! tables.
//!
//! Generality:
//! - facts on outputs (in func signature)?
//! - Implement checking at the CLIF level as well.
//! - Check instructions that can trap as well?
//!
//! Nicer errors:
//! - attach instruction index or some other identifier to errors
//!
//! Refactoring:
//! - avoid the "default fact" infra everywhere we fetch facts,
//! instead doing it in the subsume check (and take the type with
//! subsume)?
//!
//! Text format cleanup:
//! - make the bitwidth on `max` facts optional in the CLIF text
//! format?
//! - make offset in `mem` fact optional in the text format?
//!
//! Bikeshed colors (syntax):
//! - Put fact bang-annotations after types?
//! `v0: i64 ! fact(..)` vs. `v0 ! fact(..): i64`
use crate::ir;
use crate::ir::types::*;
use crate::isa::TargetIsa;
use crate::machinst::{InsnIndex, LowerBackend, MachInst, VCode};
use cranelift_entity::PrimaryMap;
use crate::machinst::{BlockIndex, LowerBackend, MachInst, VCode};
use regalloc2::Function as _;
use std::fmt;
#[cfg(feature = "enable-serde")]
@ -81,6 +101,9 @@ pub enum PccError {
/// A derivation of an output fact is unsupported (incorrect or
/// not derivable).
UnsupportedFact,
/// A block parameter claims a fact that one of its predecessors
/// does not support.
UnsupportedBlockparam,
/// A memory access is out of bounds.
OutOfBounds,
/// Proof-carrying-code checking is not implemented for a
@ -90,6 +113,15 @@ pub enum PccError {
/// particular instruction that instruction-selection chose. This
/// is an internal compiler error.
UnimplementedInst,
/// Access to an invalid or undefined field offset in a struct.
InvalidFieldOffset,
/// Access to a field via the wrong type.
BadFieldType,
/// Store to a read-only field.
WriteToReadOnlyField,
/// Store of data to a field with a fact that does not subsume the
/// field's fact.
InvalidStoredFact,
}
/// A fact on a value.
@ -130,6 +162,37 @@ impl fmt::Display for Fact {
}
}
impl Fact {
/// Try to infer a minimal fact for a value of the given IR type.
pub fn infer_from_type(ty: ir::Type) -> Option<&'static Self> {
static FACTS: [Fact; 4] = [
Fact::ValueMax {
bit_width: 8,
max: u8::MAX as u64,
},
Fact::ValueMax {
bit_width: 16,
max: u16::MAX as u64,
},
Fact::ValueMax {
bit_width: 32,
max: u32::MAX as u64,
},
Fact::ValueMax {
bit_width: 64,
max: u64::MAX,
},
];
match ty {
I8 => Some(&FACTS[0]),
I16 => Some(&FACTS[1]),
I32 => Some(&FACTS[2]),
I64 => Some(&FACTS[3]),
_ => None,
}
}
}
macro_rules! ensure {
( $condition:expr, $err:tt $(,)? ) => {
if !$condition {
@ -148,18 +211,15 @@ macro_rules! bail {
/// context carries environment/global properties, such as the machine
/// pointer width.
pub struct FactContext<'a> {
memory_types: &'a PrimaryMap<ir::MemoryType, ir::MemoryTypeData>,
function: &'a ir::Function,
pointer_width: u16,
}
impl<'a> FactContext<'a> {
/// Create a new "fact context" in which to evaluate facts.
pub fn new(
memory_types: &'a PrimaryMap<ir::MemoryType, ir::MemoryTypeData>,
pointer_width: u16,
) -> Self {
pub fn new(function: &'a ir::Function, pointer_width: u16) -> Self {
FactContext {
memory_types,
function,
pointer_width,
}
}
@ -170,6 +230,14 @@ impl<'a> FactContext<'a> {
// Reflexivity.
(l, r) if l == r => true,
// Any fact on the LHS subsumes a minimal (always-true)
// fact about the max value of a given bitwidth on the
// RHS: e.g., no matter the value, the bottom 8 bits will
// always be <= 255.
(_, Fact::ValueMax { bit_width, max }) if *max == max_value_for_width(*bit_width) => {
true
}
(
Fact::ValueMax {
bit_width: bw_lhs,
@ -195,6 +263,18 @@ impl<'a> FactContext<'a> {
}
}
/// Computes whether the optional fact `lhs` subsumes (implies)
/// the optional fact `rhs`. A `None` never subsumes any fact, and
/// is always subsumed by any fact at all (or no fact).
pub fn subsumes_fact_optionals(&self, lhs: Option<&Fact>, rhs: Option<&Fact>) -> bool {
match (lhs, rhs) {
(None, None) => true,
(Some(_), None) => true,
(None, Some(_)) => false,
(Some(lhs), Some(rhs)) => self.subsumes(lhs, rhs),
}
}
/// Computes whatever fact can be known about the sum of two
/// values with attached facts. The add is performed to the given
/// bit-width. Note that this is distinct from the machine or
@ -213,6 +293,7 @@ impl<'a> FactContext<'a> {
},
) if bw_lhs == bw_rhs && add_width >= *bw_lhs => {
let computed_max = lhs.checked_add(*rhs)?;
let computed_max = std::cmp::min(max_value_for_width(add_width), computed_max);
Some(Fact::ValueMax {
bit_width: *bw_lhs,
max: computed_max,
@ -352,7 +433,10 @@ impl<'a> FactContext<'a> {
/// Check that accessing memory via a pointer with this fact, with
/// a memory access of the given size, is valid.
pub fn check_address(&self, fact: &Fact, size: u32) -> PccResult<()> {
///
/// If valid, returns the memory type and offset into that type
/// that this address accesses.
fn check_address(&self, fact: &Fact, size: u32) -> PccResult<(ir::MemoryType, i64)> {
match fact {
Fact::Mem { ty, offset } => {
let end_offset: i64 = offset
@ -360,17 +444,70 @@ impl<'a> FactContext<'a> {
.ok_or(PccError::Overflow)?;
let end_offset: u64 =
u64::try_from(end_offset).map_err(|_| PccError::OutOfBounds)?;
match &self.memory_types[*ty] {
match &self.function.memory_types[*ty] {
ir::MemoryTypeData::Struct { size, .. }
| ir::MemoryTypeData::Memory { size } => {
ensure!(end_offset <= *size, OutOfBounds)
}
ir::MemoryTypeData::Empty => bail!(OutOfBounds),
}
Ok((*ty, *offset))
}
_ => bail!(OutOfBounds),
}
}
/// Get the struct field, if any, that is accessed by a pointer with
/// the given fact and an access of the given type.
pub fn struct_field<'b>(
&'b self,
fact: &Fact,
access_ty: ir::Type,
) -> PccResult<Option<&'b ir::MemoryTypeField>> {
let (ty, offset) = self.check_address(fact, access_ty.bytes())?;
let offset =
u64::try_from(offset).expect("valid access address cannot have a negative offset");
if let ir::MemoryTypeData::Struct { fields, .. } = &self.function.memory_types[ty] {
let field = fields
.iter()
.find(|field| field.offset == offset)
.ok_or(PccError::InvalidFieldOffset)?;
if field.ty != access_ty {
bail!(BadFieldType);
}
Ok(Some(field))
} else {
// Access to valid memory, but not a struct: no facts can be attached to the result.
Ok(None)
}
}
/// Check a load, and determine what fact, if any, the result of the load might have.
pub fn load<'b>(&'b self, fact: &Fact, access_ty: ir::Type) -> PccResult<Option<&'b Fact>> {
Ok(self
.struct_field(fact, access_ty)?
.and_then(|field| field.fact())
.or_else(|| Fact::infer_from_type(access_ty)))
}
/// Check a store.
pub fn store(
&self,
fact: &Fact,
access_ty: ir::Type,
data_fact: Option<&Fact>,
) -> PccResult<()> {
if let Some(field) = self.struct_field(fact, access_ty)? {
// If it's a read-only field, disallow.
if field.readonly {
bail!(WriteToReadOnlyField);
}
// Check that the fact on the stored data subsumes the field's fact.
if !self.subsumes_fact_optionals(data_fact, field.fact()) {
bail!(InvalidStoredFact);
}
}
Ok(())
}
}
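
A hypothetical sketch of the load/store rules that `FactContext::load` and `FactContext::store` implement above, reduced to plain functions: a load is accepted when the field's fact subsumes the fact claimed on the result, and a store is accepted when the field is writable and the stored value's fact subsumes the field's fact. The `subsumes` helper here collapses the real subsumption check to string equality plus the `Option` rules, purely for illustration.

```rust
// Illustrative only: `Field`, `PccError`, and `subsumes` are simplified stand-ins.
#[derive(Debug)]
enum PccError {
    WriteToReadOnlyField,
    InvalidStoredFact,
    UnsupportedFact,
}

struct Field<'a> {
    readonly: bool,
    /// Placeholder for a real `Fact`; a string is enough to show the flow.
    fact: Option<&'a str>,
}

/// Stand-in for `FactContext::subsumes_fact_optionals`: anything supports an
/// absent fact, an absent fact supports nothing, and otherwise this sketch
/// falls back to reflexivity (equal facts).
fn subsumes(lhs: Option<&str>, rhs: Option<&str>) -> bool {
    match (lhs, rhs) {
        (_, None) => true,
        (None, Some(_)) => false,
        (Some(l), Some(r)) => l == r,
    }
}

/// Load rule: the field's fact must subsume whatever fact the program
/// claims on the load's result.
fn check_load(field: &Field, result_fact: Option<&str>) -> Result<(), PccError> {
    if subsumes(field.fact, result_fact) {
        Ok(())
    } else {
        Err(PccError::UnsupportedFact)
    }
}

/// Store rule: read-only fields reject writes, and the stored value's fact
/// must subsume the field's fact so the in-memory invariant is re-established.
fn check_store(field: &Field, stored_fact: Option<&str>) -> Result<(), PccError> {
    if field.readonly {
        return Err(PccError::WriteToReadOnlyField);
    }
    if !subsumes(stored_fact, field.fact) {
        return Err(PccError::InvalidStoredFact);
    }
    Ok(())
}

fn main() {
    let field = Field { readonly: false, fact: Some("mem(mt1, 0)") };

    // Loading a value and claiming exactly the field's fact is fine.
    assert!(check_load(&field, Some("mem(mt1, 0)")).is_ok());

    // Storing unannotated data cannot prove the field's fact.
    assert!(matches!(check_store(&field, None), Err(PccError::InvalidStoredFact)));

    // Stores to read-only fields are rejected outright.
    let ro = Field { readonly: true, fact: None };
    assert!(matches!(check_store(&ro, Some("anything")), Err(PccError::WriteToReadOnlyField)));
}
```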
@ -391,15 +528,39 @@ pub fn check_vcode_facts<B: LowerBackend + TargetIsa>(
vcode: &VCode<B::MInst>,
backend: &B,
) -> PccResult<()> {
for inst in 0..vcode.num_insts() {
let inst = InsnIndex::new(inst);
let ctx = FactContext::new(f, backend.triple().pointer_width().unwrap().bits().into());
// Check that individual instructions are valid according to input
// facts, and support the stated output facts.
for block in 0..vcode.num_blocks() {
let block = BlockIndex::new(block);
for inst in vcode.block_insns(block).iter() {
if vcode.inst_defines_facts(inst) || vcode[inst].is_mem_access() {
// This instruction defines a register with a new fact, or
// has some side-effect we want to be careful to
// verify. We'll call into the backend to validate this
// fact with respect to the instruction and the input
// facts.
backend.check_fact(&vcode[inst], f, vcode)?;
backend.check_fact(&ctx, vcode, &vcode[inst])?;
}
// If this is a branch, check that all block arguments subsume
// the assumed facts on the blockparams of successors.
if vcode.is_branch(inst) {
for (succ_idx, succ) in vcode.block_succs(block).iter().enumerate() {
for (arg, param) in vcode
.branch_blockparams(block, inst, succ_idx)
.iter()
.zip(vcode.block_params(*succ).iter())
{
let arg_fact = vcode.vreg_fact(*arg);
let param_fact = vcode.vreg_fact(*param);
if !ctx.subsumes_fact_optionals(arg_fact, param_fact) {
return Err(PccError::UnsupportedBlockparam);
}
}
}
}
}
}
Ok(())
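
A small, assumed-simplified sketch of the blockparam rule enforced in `check_vcode_facts` above: the fact on each branch argument must subsume the fact declared on the successor's corresponding blockparam, so widening a `max` range is accepted and narrowing it is rejected (mirroring the `blockparams.clif` filetests below). Equal bit widths are assumed here to keep the subsumption test to a single comparison.

```rust
// Simplified: only `max`-style facts, and equal bit widths are assumed.
#[derive(Clone, Copy)]
struct ValueMax {
    bit_width: u16,
    max: u64,
}

/// A branch argument's fact supports a blockparam's declared fact when it
/// proves a bound at least as tight at the same width: a tighter maximum
/// implies any looser one.
fn arg_supports_param(arg: ValueMax, param: ValueMax) -> bool {
    arg.bit_width == param.bit_width && arg.max <= param.max
}

fn main() {
    let arg = ValueMax { bit_width: 64, max: 0x100 };

    // Successor declares a broader range: accepted (succeed/blockparams.clif).
    assert!(arg_supports_param(arg, ValueMax { bit_width: 64, max: 0x1000 }));

    // Successor declares a narrower range: rejected with
    // `UnsupportedBlockparam` (fail/blockparams.clif).
    assert!(!arg_supports_param(arg, ValueMax { bit_width: 64, max: 0xff }));
}
```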

cranelift/codegen/src/isa/aarch64/inst/args.rs (11)

@ -647,6 +647,17 @@ impl ScalarSize {
ScalarSize::Size128 => ScalarSize::Size64,
}
}
/// Return a type with the same size as this scalar.
pub fn ty(&self) -> Type {
match self {
ScalarSize::Size8 => I8,
ScalarSize::Size16 => I16,
ScalarSize::Size32 => I32,
ScalarSize::Size64 => I64,
ScalarSize::Size128 => I128,
}
}
}
/// Type used to communicate the size of a vector operand.

cranelift/codegen/src/isa/aarch64/lower.rs (10)

@ -8,9 +8,9 @@
//! - Floating-point immediates (FIMM instruction).
use crate::ir::condcodes::{FloatCC, IntCC};
use crate::ir::pcc::PccResult;
use crate::ir::pcc::{FactContext, PccResult};
use crate::ir::Inst as IRInst;
use crate::ir::{Function, Opcode, Value};
use crate::ir::{Opcode, Value};
use crate::isa::aarch64::inst::*;
use crate::isa::aarch64::pcc;
use crate::isa::aarch64::AArch64Backend;
@ -133,10 +133,10 @@ impl LowerBackend for AArch64Backend {
fn check_fact(
&self,
inst: &Self::MInst,
function: &Function,
ctx: &FactContext<'_>,
vcode: &VCode<Self::MInst>,
inst: &Self::MInst,
) -> PccResult<()> {
pcc::check(inst, function, vcode)
pcc::check(ctx, vcode, inst)
}
}

cranelift/codegen/src/isa/aarch64/pcc.rs (203)

@ -1,7 +1,8 @@
//! Proof-carrying code checking for AArch64 VCode.
use crate::ir::pcc::*;
use crate::ir::{Function, MemFlags};
use crate::ir::types::*;
use crate::ir::MemFlags;
use crate::isa::aarch64::inst::args::{PairAMode, ShiftOp};
use crate::isa::aarch64::inst::ALUOp;
use crate::isa::aarch64::inst::Inst;
@ -68,10 +69,7 @@ fn check_output<F: Fn() -> PccResult<Fact>>(
}
}
pub(crate) fn check(inst: &Inst, function: &Function, vcode: &VCode<Inst>) -> PccResult<()> {
// Create a new fact context with the machine's pointer width.
let ctx = FactContext::new(&function.memory_types, 64);
pub(crate) fn check(ctx: &FactContext, vcode: &VCode<Inst>, inst: &Inst) -> PccResult<()> {
trace!("Checking facts on inst: {:?}", inst);
match inst {
@ -81,60 +79,50 @@ pub(crate) fn check(inst: &Inst, function: &Function, vcode: &VCode<Inst>) -> Pc
// facts given to us in the CLIF should still be true.
Ok(())
}
Inst::ULoad8 { mem, flags, .. } | Inst::SLoad8 { mem, flags, .. } => {
check_addr(&ctx, *flags, mem, vcode, 1)
Inst::ULoad8 { rd, mem, flags } | Inst::SLoad8 { rd, mem, flags } => {
check_load(&ctx, Some(rd.to_reg()), *flags, mem, vcode, I8)
}
Inst::ULoad16 { rd, mem, flags } | Inst::SLoad16 { rd, mem, flags } => {
check_load(&ctx, Some(rd.to_reg()), *flags, mem, vcode, I16)
}
Inst::ULoad16 { mem, flags, .. } | Inst::SLoad16 { mem, flags, .. } => {
check_addr(&ctx, *flags, mem, vcode, 2)
Inst::ULoad32 { rd, mem, flags } | Inst::SLoad32 { rd, mem, flags } => {
check_load(&ctx, Some(rd.to_reg()), *flags, mem, vcode, I32)
}
Inst::ULoad32 { mem, flags, .. } | Inst::SLoad32 { mem, flags, .. } => {
check_addr(&ctx, *flags, mem, vcode, 4)
Inst::ULoad64 { rd, mem, flags } => {
check_load(&ctx, Some(rd.to_reg()), *flags, mem, vcode, I64)
}
Inst::ULoad64 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 8),
Inst::FpuLoad32 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 4),
Inst::FpuLoad64 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 8),
Inst::FpuLoad128 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 16),
Inst::LoadP64 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuLoadP64 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuLoadP128 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 32),
Inst::VecLoadReplicate { rn, flags, .. } => check_scalar_addr(&ctx, *flags, *rn, vcode, 16),
Inst::FpuLoad32 { mem, flags, .. } => check_load(&ctx, None, *flags, mem, vcode, F32),
Inst::FpuLoad64 { mem, flags, .. } => check_load(&ctx, None, *flags, mem, vcode, F64),
Inst::FpuLoad128 { mem, flags, .. } => check_load(&ctx, None, *flags, mem, vcode, I8X16),
Inst::LoadP64 { mem, flags, .. } => check_load_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuLoadP64 { mem, flags, .. } => check_load_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuLoadP128 { mem, flags, .. } => check_load_pair(&ctx, *flags, mem, vcode, 32),
Inst::VecLoadReplicate {
rn, flags, size, ..
} => check_load_addr(&ctx, *flags, *rn, vcode, size.lane_size().ty()),
Inst::LoadAcquire {
access_ty,
rn,
flags,
..
} => check_scalar_addr(
&ctx,
*flags,
*rn,
vcode,
u8::try_from(access_ty.bytes()).unwrap(),
),
} => check_load_addr(&ctx, *flags, *rn, vcode, *access_ty),
// TODO: stores: once we have memcaps, check that we aren't
// overwriting a field that has a pointee type.
Inst::Store8 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 1),
Inst::Store16 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 2),
Inst::Store32 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 4),
Inst::Store64 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 8),
Inst::FpuStore32 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 4),
Inst::FpuStore64 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 8),
Inst::FpuStore128 { mem, flags, .. } => check_addr(&ctx, *flags, mem, vcode, 16),
Inst::StoreP64 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuStoreP64 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuStoreP128 { mem, flags, .. } => check_addr_pair(&ctx, *flags, mem, vcode, 32),
Inst::Store8 { rd, mem, flags } => check_store(&ctx, Some(*rd), *flags, mem, vcode, I8),
Inst::Store16 { rd, mem, flags } => check_store(&ctx, Some(*rd), *flags, mem, vcode, I16),
Inst::Store32 { rd, mem, flags } => check_store(&ctx, Some(*rd), *flags, mem, vcode, I32),
Inst::Store64 { rd, mem, flags } => check_store(&ctx, Some(*rd), *flags, mem, vcode, I64),
Inst::FpuStore32 { mem, flags, .. } => check_store(&ctx, None, *flags, mem, vcode, F32),
Inst::FpuStore64 { mem, flags, .. } => check_store(&ctx, None, *flags, mem, vcode, F64),
Inst::FpuStore128 { mem, flags, .. } => check_store(&ctx, None, *flags, mem, vcode, I8X16),
Inst::StoreP64 { mem, flags, .. } => check_store_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuStoreP64 { mem, flags, .. } => check_store_pair(&ctx, *flags, mem, vcode, 16),
Inst::FpuStoreP128 { mem, flags, .. } => check_store_pair(&ctx, *flags, mem, vcode, 32),
Inst::StoreRelease {
access_ty,
rn,
flags,
..
} => check_scalar_addr(
&ctx,
*flags,
*rn,
vcode,
u8::try_from(access_ty.bytes()).unwrap(),
),
} => check_store_addr(&ctx, *flags, *rn, vcode, *access_ty),
Inst::AluRRR {
alu_op: ALUOp::Add,
@ -241,12 +229,63 @@ pub(crate) fn check(inst: &Inst, function: &Function, vcode: &VCode<Inst>) -> Pc
}
}
fn check_addr(
/// The operation we're checking against an amode: either
///
/// - a *load*, and we need to validate that the field's fact subsumes
/// the load result's fact, OR
///
/// - a *store*, and we need to validate that the stored data's fact
/// subsumes the field's fact.
enum LoadOrStore<'a> {
Load { result_fact: Option<&'a Fact> },
Store { stored_fact: Option<&'a Fact> },
}
fn check_load(
ctx: &FactContext,
rd: Option<Reg>,
flags: MemFlags,
addr: &AMode,
vcode: &VCode<Inst>,
ty: Type,
) -> PccResult<()> {
let result_fact = rd.map(|rd| get_fact(vcode, rd)).transpose()?;
check_addr(
ctx,
flags,
addr,
vcode,
ty,
LoadOrStore::Load { result_fact },
)
}
fn check_store(
ctx: &FactContext,
rd: Option<Reg>,
flags: MemFlags,
addr: &AMode,
vcode: &VCode<Inst>,
size: u8,
ty: Type,
) -> PccResult<()> {
let stored_fact = rd.map(|rd| get_fact(vcode, rd)).transpose()?;
check_addr(
ctx,
flags,
addr,
vcode,
ty,
LoadOrStore::Store { stored_fact },
)
}
fn check_addr<'a>(
ctx: &FactContext,
flags: MemFlags,
addr: &AMode,
vcode: &VCode<Inst>,
ty: Type,
op: LoadOrStore<'a>,
) -> PccResult<()> {
if !flags.checked() {
return Ok(());
@ -254,19 +293,36 @@ fn check_addr(
trace!("check_addr: {:?}", addr);
let check = |addr: &Fact, ty: Type| -> PccResult<()> {
match op {
LoadOrStore::Load { result_fact } => {
let loaded_fact = ctx.load(addr, ty)?;
trace!(
"checking a load: loaded_fact = {loaded_fact:?} result_fact = {result_fact:?}"
);
if ctx.subsumes_fact_optionals(loaded_fact, result_fact) {
Ok(())
} else {
Err(PccError::UnsupportedFact)
}
}
LoadOrStore::Store { stored_fact } => ctx.store(addr, ty, stored_fact),
}
};
match addr {
&AMode::RegReg { rn, rm } => {
let rn = get_fact(vcode, rn)?;
let rm = get_fact(vcode, rm)?;
let sum = fail_if_missing(ctx.add(&rn, &rm, 64))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::RegScaled { rn, rm, ty } => {
let rn = get_fact(vcode, rn)?;
let rm = get_fact(vcode, rm)?;
let rm_scaled = fail_if_missing(ctx.scale(&rm, 64, ty.bytes()))?;
let sum = fail_if_missing(ctx.add(&rn, &rm_scaled, 64))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::RegScaledExtended {
rn,
@ -279,19 +335,22 @@ fn check_addr(
let rm_extended = fail_if_missing(extend_fact(ctx, rm, extendop))?;
let rm_scaled = fail_if_missing(ctx.scale(&rm_extended, 64, ty.bytes()))?;
let sum = fail_if_missing(ctx.add(&rn, &rm_scaled, 64))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::RegExtended { rn, rm, extendop } => {
let rn = get_fact(vcode, rn)?;
let rm = get_fact(vcode, rm)?;
let rm_extended = fail_if_missing(extend_fact(ctx, rm, extendop))?;
let sum = fail_if_missing(ctx.add(&rn, &rm_extended, 64))?;
ctx.check_address(&sum, size.into())
trace!("rn = {rn:?} rm = {rm:?} rm_extended = {rm_extended:?} sum = {sum:?}");
check(&sum, ty)?;
trace!(" -> checks out!");
Ok(())
}
&AMode::Unscaled { rn, simm9 } => {
let rn = get_fact(vcode, rn)?;
let sum = fail_if_missing(ctx.offset(&rn, 64, simm9.value.into()))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::UnsignedOffset { rn, uimm12 } => {
let rn = get_fact(vcode, rn)?;
@ -299,12 +358,12 @@ fn check_addr(
// most 32 or 64 for large vector ops, and the `uimm12`'s
// value is at most 4095.
let uimm12: u64 = uimm12.value.into();
let offset: u64 = uimm12.checked_mul(size.into()).unwrap();
let offset: u64 = uimm12.checked_mul(ty.bytes().into()).unwrap();
// This `unwrap()` will always succeed because the value
// will always be positive and much smaller than
// `i64::MAX` (see above).
let sum = fail_if_missing(ctx.offset(&rn, 64, i64::try_from(offset).unwrap()))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::Label { .. } | &AMode::Const { .. } => {
// Always accept: labels and constants must be within the
@ -314,7 +373,7 @@ fn check_addr(
&AMode::RegOffset { rn, off, .. } => {
let rn = get_fact(vcode, rn)?;
let sum = fail_if_missing(ctx.offset(&rn, 64, off))?;
ctx.check_address(&sum, size.into())
check(&sum, ty)
}
&AMode::SPOffset { .. }
| &AMode::FPOffset { .. }
@ -328,7 +387,7 @@ fn check_addr(
}
}
fn check_addr_pair(
fn check_load_pair(
_ctx: &FactContext,
_flags: MemFlags,
_addr: &PairAMode,
@ -338,16 +397,42 @@ fn check_addr_pair(
Err(PccError::UnimplementedInst)
}
fn check_scalar_addr(
fn check_store_pair(
_ctx: &FactContext,
_flags: MemFlags,
_addr: &PairAMode,
_vcode: &VCode<Inst>,
_size: u8,
) -> PccResult<()> {
Err(PccError::UnimplementedInst)
}
fn check_load_addr(
ctx: &FactContext,
flags: MemFlags,
reg: Reg,
vcode: &VCode<Inst>,
ty: Type,
) -> PccResult<()> {
if !flags.checked() {
return Ok(());
}
let fact = get_fact(vcode, reg)?;
let _output_fact = ctx.load(fact, ty)?;
Ok(())
}
fn check_store_addr(
ctx: &FactContext,
flags: MemFlags,
reg: Reg,
vcode: &VCode<Inst>,
size: u8,
ty: Type,
) -> PccResult<()> {
if !flags.checked() {
return Ok(());
}
let fact = get_fact(vcode, reg)?;
ctx.check_address(fact, size.into())
let _output_fact = ctx.store(fact, ty, None)?;
Ok(())
}

cranelift/codegen/src/machinst/lower.rs (6)

@ -8,7 +8,7 @@
use crate::entity::SecondaryMap;
use crate::fx::{FxHashMap, FxHashSet};
use crate::inst_predicates::{has_lowering_side_effect, is_constant_64bit};
use crate::ir::pcc::{PccError, PccResult};
use crate::ir::pcc::{FactContext, PccError, PccResult};
use crate::ir::{
ArgumentPurpose, Block, Constant, ConstantData, DataFlowGraph, ExternalName, Function,
GlobalValue, GlobalValueData, Immediate, Inst, InstructionData, MemFlags, RelSourceLoc, Type,
@ -151,9 +151,9 @@ pub trait LowerBackend {
/// on VRegs.
fn check_fact(
&self,
_inst: &Self::MInst,
_function: &Function,
_ctx: &FactContext<'_>,
_vcode: &VCode<Self::MInst>,
_inst: &Self::MInst,
) -> PccResult<()> {
Err(PccError::UnimplementedBackend)
}

cranelift/codegen/src/machinst/vcode.rs (10)

@ -1290,7 +1290,13 @@ impl<I: VCodeInst> VCode<I> {
/// Get the fact, if any, for a given VReg.
pub fn vreg_fact(&self, vreg: VReg) -> Option<&Fact> {
let vreg = self.resolve_vreg_alias(vreg);
self.facts[vreg.vreg()].as_ref()
match &self.facts[vreg.vreg()] {
// The vreg has a stated fact -- return that.
Some(fact) => Some(fact),
// The vreg has no fact, but maybe we can infer a minimal
// one (e.g., an integer range) from the type.
None => Fact::infer_from_type(self.vreg_types[vreg.vreg()]),
}
}
/// Does a given instruction define any facts?
@ -1299,7 +1305,7 @@ impl<I: VCodeInst> VCode<I> {
.iter()
.filter(|o| o.kind() == OperandKind::Def)
.map(|o| o.vreg())
.any(|vreg| self.vreg_fact(vreg).is_some())
.any(|vreg| self.facts[vreg.vreg()].is_some())
}
}

cranelift/filetests/filetests/pcc/fail/blockparams.clif (19)

@ -0,0 +1,19 @@
test compile expect-fail
set enable_pcc=true
target aarch64
function %f0(i64, i32) -> i64 {
block0(v0 ! max(64, 0x100): i64, v1: i32):
v2 ! max(64, 0x100) = iconst.i64 0x100
v3 ! max(64, 0x200) = iadd v0, v2
brif v1, block1(v0), block2(v3)
block1(v4 ! max(64, 0xff): i64): ;; shrink the range -- should be caught
jump block3(v4)
block2(v5 ! max(64, 0x1ff): i64):
jump block3(v5)
block3(v6 ! max(64, 1): i64):
return v6
}

cranelift/filetests/filetests/pcc/fail/struct.clif (19)

@ -0,0 +1,19 @@
test compile expect-fail
set enable_pcc=true
target aarch64
function %f0(i64) -> i64 {
mt0 = struct 8 { 0: i64 ! mem(mt1, 0) }
mt1 = memory 0x1_0000_0000
block0(v0 ! mem(mt0, 0): i64):
v1 ! mem(mt1, 8) = load.i64 checked v0
return v1
}
function %f1(i64, i64) {
mt0 = struct 8 { 0: i64 ! mem(mt1, 0) }
mt1 = memory 0x1_0000_0000
block0(v0 ! mem(mt0, 0): i64, v1 ! mem(mt1, 8): i64):
store.i64 checked v1, v0
return
}

cranelift/filetests/filetests/pcc/fail/vmctx.clif (37)

@ -0,0 +1,37 @@
test compile expect-fail
set enable_pcc=true
target aarch64
;; Equivalent to a Wasm `i64.load` from a static memory.
function %f0(i64, i32) -> i64 {
;; mock vmctx struct:
mt0 = struct 8 { 0: i64 readonly ! mem(mt1, 0) }
;; mock static memory: 4GiB range, *but insufficient guard*
mt1 = memory 0x1_0000_0000
block0(v0 ! mem(mt0, 0): i64, v1: i32):
;; Compute the address: base + offset. With no guard region beyond
;; the 4GiB range, an 8-byte I64 load at the maximum offset is out of bounds.
v2 ! mem(mt1, 0) = load.i64 checked v0+0 ;; base pointer
v3 ! max(64, 0xffff_ffff) = uextend.i64 v1 ;; offset
v4 ! mem(mt1, 0xffff_ffff) = iadd.i64 v2, v3
v5 = load.i64 checked v4
return v5
}
;; Equivalent to a Wasm `i64.load` from a static memory.
function %f1(i64, i32) -> i64 {
;; mock vmctx struct:
mt0 = struct 16 { 0: i64 readonly ! mem(mt1, 0), 8: i64 readonly }
;; mock static memory: 4GiB range, plus 2GiB guard
mt1 = memory 0x1_8000_0000
block0(v0 ! mem(mt0, 0): i64, v1: i32):
;; Compute the address: base + offset. Guard region (2GiB) is
;; sufficient for an 8-byte I64 load.
v2 ! mem(mt1, 0) = load.i64 checked v0+8 ;; base pointer, but the wrong one
v3 ! max(64, 0xffff_ffff) = uextend.i64 v1 ;; offset
v4 ! mem(mt1, 0xffff_ffff) = iadd.i64 v2, v3
v5 = load.i64 checked v4
return v5
}

cranelift/filetests/filetests/pcc/succeed/blockparams.clif (19)

@ -0,0 +1,19 @@
test compile
set enable_pcc=true
target aarch64
function %f0(i64, i32) -> i64 {
block0(v0 ! max(64, 0x100): i64, v1: i32):
v2 ! max(64, 0x100) = iconst.i64 0x100
v3 ! max(64, 0x200) = iadd v0, v2
brif v1, block1(v0), block2(v3)
block1(v4 ! max(64, 0x1000): i64): ;; broaden the range -- always allowed
jump block3(v4)
block2(v5 ! max(64, 0x2000): i64):
jump block3(v5)
block3(v6 ! max(64, 0x2001): i64):
return v6
}

cranelift/filetests/filetests/pcc/succeed/struct.clif (19)

@ -0,0 +1,19 @@
test compile
set enable_pcc=true
target aarch64
function %f0(i64) -> i64 {
mt0 = struct 8 { 0: i64 ! mem(mt1, 0) }
mt1 = memory 0x1_0000_0000
block0(v0 ! mem(mt0, 0): i64):
v1 ! mem(mt1, 0) = load.i64 checked v0
return v1
}
function %f1(i64, i64) {
mt0 = struct 8 { 0: i64 ! mem(mt1, 0) }
mt1 = memory 0x1_0000_0000
block0(v0 ! mem(mt0, 0): i64, v1 ! mem(mt1, 0): i64):
store.i64 checked v1, v0
return
}

cranelift/filetests/filetests/pcc/succeed/vmctx.clif (20)

@ -0,0 +1,20 @@
test compile
set enable_pcc=true
target aarch64
;; Equivalent to a Wasm `i64.load` from a static memory.
function %f0(i64, i32) -> i64 {
;; mock vmctx struct:
mt0 = struct 8 { 0: i64 readonly ! mem(mt1, 0) }
;; mock static memory: 4GiB range, plus 2GiB guard
mt1 = memory 0x1_8000_0000
block0(v0 ! mem(mt0, 0): i64, v1: i32):
;; Compute the address: base + offset. Guard region (2GiB) is
;; sufficient for an 8-byte I64 load.
v2 ! mem(mt1, 0) = load.i64 checked v0+0 ;; base pointer
v3 ! max(64, 0xffff_ffff) = uextend.i64 v1 ;; offset
v4 ! mem(mt1, 0xffff_ffff) = iadd.i64 v2, v3
v5 = load.i64 checked v4
return v5
}
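
A back-of-the-envelope check of the bounds arithmetic behind these vmctx examples (a sketch of the arithmetic only, not the PCC algorithm): with a zero-extended 32-bit offset and an 8-byte access, the accessed memory type must be at least `0xffff_ffff + 8` bytes, which the 4GiB-plus-2GiB-guard memory satisfies and the bare 4GiB memory in `pcc/fail/vmctx.clif` does not.

```rust
fn main() {
    let max_offset: u64 = 0xffff_ffff; // `uextend`ed 32-bit Wasm address
    let access_size: u64 = 8; // an i64 load

    // succeed/vmctx.clif: 4GiB addressable range plus a 2GiB guard.
    let memory_with_guard: u64 = 0x1_8000_0000;
    assert!(max_offset + access_size <= memory_with_guard);

    // fail/vmctx.clif (%f0): exactly 4GiB with no guard region.
    let memory_without_guard: u64 = 0x1_0000_0000;
    assert!(max_offset + access_size > memory_without_guard);
}
```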