fuzzgen: Add support for stack slot alignment (#8650)
afonso360 authored May 22, 2024
1 parent 1bb55c5 commit c0fda8c
Showing 2 changed files with 23 additions and 10 deletions.
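This change teaches fuzzgen to pick a random alignment (as a power of two) for each generated stack slot and to pass it through to Cranelift's `StackSlotData`, instead of always requesting alignment 0. A minimal standalone sketch of the API being exercised, assuming the three-argument `StackSlotData::new` constructor shown in the diff below, where the third argument is the base-2 logarithm of the alignment:

```rust
use cranelift_codegen::ir::{StackSlotData, StackSlotKind};

fn main() {
    // A 128-byte explicit slot aligned to 2^4 = 16 bytes. The alignment
    // argument is the log2 of the alignment in bytes, not the byte count.
    let align_log2: u8 = 4;
    let slot_data = StackSlotData::new(StackSlotKind::ExplicitSlot, 128, align_log2);
    assert_eq!(slot_data.size, 128);
}
```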
3 changes: 3 additions & 0 deletions cranelift/fuzzgen/src/config.rs
@@ -37,6 +37,8 @@ pub struct Config {
     pub static_stack_slots_per_function: RangeInclusive<usize>,
     /// Size in bytes
     pub static_stack_slot_size: RangeInclusive<usize>,
+    /// Stack slot alignment as a power of 2
+    pub stack_slot_alignment_log2: RangeInclusive<usize>,
     /// Allowed stack probe sizes
     pub stack_probe_size_log2: RangeInclusive<usize>,

@@ -86,6 +88,7 @@ impl Default for Config {
             switch_max_range_size: 2..=32,
             static_stack_slots_per_function: 0..=8,
             static_stack_slot_size: 0..=128,
+            stack_slot_alignment_log2: 0..=10,
             // We need the mix of sizes that allows us to:
             // * not generates any stack probes
             // * generate unrolled stack probes
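Each config knob is a `RangeInclusive`, and fuzzgen draws concrete values from its `arbitrary::Unstructured` input (`self.u` in the diff) via its `param` helper. A rough sketch of how the new `stack_slot_alignment_log2` range of `0..=10` turns into a byte alignment; the `sample` helper below is hypothetical and only mirrors what such a draw presumably looks like:

```rust
use arbitrary::{Result, Unstructured};
use std::ops::RangeInclusive;

// Hypothetical helper: draw one value from an inclusive range of fuzz input.
fn sample(u: &mut Unstructured, range: &RangeInclusive<usize>) -> Result<usize> {
    u.int_in_range(range.clone())
}

fn main() -> Result<()> {
    let raw = [0x5a, 0x33, 0x01, 0xff];
    let mut u = Unstructured::new(&raw);

    // log2 value in 0..=10, so the alignment is 1, 2, 4, ..., 1024 bytes.
    let align_log2 = sample(&mut u, &(0..=10))?;
    let align_bytes = 1usize << align_log2;
    assert!(align_bytes.is_power_of_two() && align_bytes <= 1024);
    Ok(())
}
```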
30 changes: 20 additions & 10 deletions cranelift/fuzzgen/src/function_generator.rs
@@ -132,7 +132,7 @@ fn insert_stack_load(
 ) -> Result<()> {
     let typevar = rets[0];
     let type_size = typevar.bytes();
-    let (slot, slot_size, category) = fgen.stack_slot_with_size(type_size)?;
+    let (slot, slot_size, _align, category) = fgen.stack_slot_with_size(type_size)?;

     // `stack_load` doesn't support setting MemFlags, and it does not set any
     // alias analysis bits, so we can only emit it for `Other` slots.
@@ -159,7 +159,7 @@ fn insert_stack_store(
     let typevar = args[0];
     let type_size = typevar.bytes();

-    let (slot, slot_size, category) = fgen.stack_slot_with_size(type_size)?;
+    let (slot, slot_size, _align, category) = fgen.stack_slot_with_size(type_size)?;

     // `stack_store` doesn't support setting MemFlags, and it does not set any
     // alias analysis bits, so we can only emit it for `Other` slots.
@@ -1185,6 +1185,8 @@ impl AACategory {
     }
 }

+pub type StackAlignment = StackSize;
+
 #[derive(Default)]
 struct Resources {
     vars: HashMap<Type, Vec<Variable>>,
@@ -1195,7 +1197,7 @@ struct Resources {
     /// This field is required to be sorted by stack slot size at all times.
     /// We use this invariant when searching for stack slots with a given size.
     /// See [FunctionGenerator::stack_slot_with_size]
-    stack_slots: Vec<(StackSlot, StackSize, AACategory)>,
+    stack_slots: Vec<(StackSlot, StackSize, StackAlignment, AACategory)>,
     usercalls: Vec<(UserExternalName, Signature)>,
     libcalls: Vec<LibCall>,
 }
@@ -1278,11 +1280,14 @@ where
     }

     /// Finds a stack slot with size of at least n bytes
-    fn stack_slot_with_size(&mut self, n: u32) -> Result<(StackSlot, StackSize, AACategory)> {
+    fn stack_slot_with_size(
+        &mut self,
+        n: u32,
+    ) -> Result<(StackSlot, StackSize, StackAlignment, AACategory)> {
         let first = self
             .resources
             .stack_slots
-            .partition_point(|&(_slot, size, _category)| size < n);
+            .partition_point(|&(_slot, size, _align, _category)| size < n);
         Ok(*self.u.choose(&self.resources.stack_slots[first..])?)
     }
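`stack_slot_with_size` relies on the invariant documented on the `stack_slots` field: the list stays sorted by size, so `partition_point` finds the first slot that is large enough and a random slot is then chosen from that suffix. A self-contained sketch of the same search over simplified tuples; the new alignment field is just carried along and ignored by the predicate:

```rust
fn main() {
    // (slot id, size, alignment) tuples, kept sorted by size as fuzzgen requires.
    let stack_slots = [(0u32, 8u32, 4u32), (1, 16, 8), (2, 64, 16), (3, 128, 1)];
    let n = 24;

    // Index of the first slot whose size is >= n; every later slot also fits.
    let first = stack_slots.partition_point(|&(_slot, size, _align)| size < n);
    assert_eq!(first, 2);
    // The generator would now pick a random element from &stack_slots[first..].
}
```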

@@ -1307,7 +1312,7 @@ where
         // TODO: Currently our only source of addresses is stack_addr, but we
         // should add global_value, symbol_value eventually
         let (addr, available_size, category) = {
-            let (ss, slot_size, category) = self.stack_slot_with_size(min_size)?;
+            let (ss, slot_size, _align, category) = self.stack_slot_with_size(min_size)?;

             // stack_slot_with_size guarantees that slot_size >= min_size
             let max_offset = slot_size - min_size;
@@ -1605,18 +1610,23 @@ where
     fn generate_stack_slots(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
         for _ in 0..self.param(&self.config.static_stack_slots_per_function)? {
             let bytes = self.param(&self.config.static_stack_slot_size)? as u32;
-            let ss_data = StackSlotData::new(StackSlotKind::ExplicitSlot, bytes, 0);
+            let alignment = self.param(&self.config.stack_slot_alignment_log2)? as u8;
+            let alignment_bytes = 1 << alignment;
+
+            let ss_data = StackSlotData::new(StackSlotKind::ExplicitSlot, bytes, alignment);
             let slot = builder.create_sized_stack_slot(ss_data);

             // Generate one Alias Analysis Category for each slot
             let category = *self.u.choose(AACategory::all())?;

-            self.resources.stack_slots.push((slot, bytes, category));
+            self.resources
+                .stack_slots
+                .push((slot, bytes, alignment_bytes, category));
         }

         self.resources
             .stack_slots
-            .sort_unstable_by_key(|&(_slot, bytes, _category)| bytes);
+            .sort_unstable_by_key(|&(_slot, bytes, _align, _category)| bytes);

         Ok(())
     }
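Note that the tuple stored in `stack_slots` carries the alignment in bytes (`alignment_bytes = 1 << alignment`), while `StackSlotData::new` is handed the log2 value directly. A small sanity-check sketch of that relationship, with the `0..=10` config range mapping to alignments of 1 to 1024 bytes:

```rust
fn main() {
    for align_log2 in 0u8..=10 {
        // The byte alignment is always a power of two, and its log2 round-trips.
        let align_bytes: u32 = 1 << align_log2;
        assert!(align_bytes.is_power_of_two());
        assert_eq!(align_bytes.trailing_zeros(), u32::from(align_log2));
    }
}
```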
@@ -1629,7 +1639,7 @@ where
         let i64_zero = builder.ins().iconst(I64, 0);
         let i128_zero = builder.ins().uextend(I128, i64_zero);

-        for &(slot, init_size, category) in self.resources.stack_slots.iter() {
+        for &(slot, init_size, _align, category) in self.resources.stack_slots.iter() {
             let mut size = init_size;

             // Insert the largest available store for the remaining size.
