Skip to content

Commit

Permalink
wip
Browse files Browse the repository at this point in the history
  • Loading branch information
alexcrichton committed Oct 30, 2024
1 parent a2025f4 commit 61b4773
Show file tree
Hide file tree
Showing 9 changed files with 452 additions and 478 deletions.
431 changes: 211 additions & 220 deletions crates/cranelift/src/func_environ.rs

Large diffs are not rendered by default.

14 changes: 8 additions & 6 deletions crates/cranelift/src/translate/code_translator/bounds_checks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,10 @@ where
let pcc = env.proof_carrying_code();

let host_page_size_log2 = env.target_config().page_size_align_log2;
let can_use_virtual_memory =
heap.page_size_log2 >= host_page_size_log2 && env.signals_based_traps();
let can_elide_bounds_check = heap.page_size_log2 >= host_page_size_log2
&& env.signals_based_traps()
&& heap.attempt_bounds_check_elision;
// NOTE(review): leftover stub from renaming `can_use_virtual_memory` to
// `can_elide_bounds_check`; unchanged hunks later in this file still
// reference `can_use_virtual_memory`, so this WIP commit is inconsistent
// as shown — either restore the original binding or finish the rename.

let make_compare = |builder: &mut FunctionBuilder,
compare_kind: IntCC,
Expand Down Expand Up @@ -193,7 +195,7 @@ where
// multiple fields in the same struct that is in linear memory --
// will all emit the same `index > bound` check, which we can GVN.
HeapStyle::Dynamic { bound_gv }
if can_use_virtual_memory && offset_and_size <= heap.offset_guard_size =>
if can_use_virtual_memory && offset_and_size <= heap.guard_size =>
{
let bound = get_dynamic_heap_bound(builder, env, heap);
let oob = make_compare(
Expand Down Expand Up @@ -371,7 +373,7 @@ where
if can_use_virtual_memory
&& heap.index_type == ir::types::I32
&& u64::from(u32::MAX)
<= u64::from(bound) + u64::from(heap.offset_guard_size) - offset_and_size =>
<= u64::from(bound) + u64::from(heap.guard_size) - offset_and_size =>
{
assert!(
can_use_virtual_memory,
Expand All @@ -385,7 +387,7 @@ where
offset,
AddrPcc::static32(
heap.memory_type,
u64::from(bound) + u64::from(heap.offset_guard_size),
u64::from(bound) + u64::from(heap.guard_size),
),
))
}
Expand Down Expand Up @@ -604,7 +606,7 @@ fn explicit_check_oob_condition_and_compute_addr<FE: FuncEnvironment + ?Sized>(
min: Expr::constant(0),
max: Expr::offset(
&Expr::global_value(gv),
i64::try_from(heap.offset_guard_size)
i64::try_from(heap.guard_size)
.unwrap()
.checked_sub(i64::from(access_size))
.unwrap(),
Expand Down
46 changes: 26 additions & 20 deletions crates/cranelift/src/translate/heap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,16 @@ pub struct HeapData {
pub max_size: Option<u64>,

/// Size in bytes of the offset-guard pages following the heap.
pub offset_guard_size: u64,
pub guard_size: u64,

/// Heap style, with additional style-specific info.
pub style: HeapStyle,
/// Whether this heap should be considered a candidate for eliding
/// explicit bounds checks.
///
/// Elision only actually happens when this is `true` and the other
/// runtime conditions also hold (the heap's page size is at least the
/// host page size and signals-based traps are enabled) — see the
/// `can_elide_bounds_check` computation in `bounds_checks.rs`.
pub attempt_bounds_check_elision: bool,

/// Size, in bytes, of the virtual-memory reservation for this heap.
///
/// NOTE(review): not referenced by any hunk shown in this diff —
/// presumably mirrors the engine's `memory_reservation` tunable; confirm
/// intended semantics before finalizing this doc.
pub memory_reservation: u64,

/// Global value providing the current bound of the heap in bytes.
///
/// (Previously carried by the now-removed `HeapStyle::Dynamic` variant;
/// hoisted to a plain field since every heap now has a bound global.)
pub bound_gv: GlobalValue,

/// The index type for the heap.
pub index_type: Type,
Expand All @@ -87,21 +93,21 @@ pub struct HeapData {
pub page_size_log2: u8,
}

/// Style of heap including style-specific information.
#[derive(Clone, PartialEq, Hash)]
pub enum HeapStyle {
/// A dynamic heap can be relocated to a different base address when it is
/// grown.
Dynamic {
/// Global value providing the current bound of the heap in bytes.
bound_gv: GlobalValue,
},
// /// Style of heap including style-specific information.
// #[derive(Clone, PartialEq, Hash)]
// pub enum HeapStyle {
// /// A dynamic heap can be relocated to a different base address when it is
// /// grown.
// Dynamic {
// /// Global value providing the current bound of the heap in bytes.
// bound_gv: GlobalValue,
// },

/// A static heap has a fixed base address and a number of not-yet-allocated
/// pages before the offset-guard pages.
Static {
/// Heap bound in bytes. The offset-guard pages are allocated after the
/// bound.
bound: u64,
},
}
// /// A static heap has a fixed base address and a number of not-yet-allocated
// /// pages before the offset-guard pages.
// Static {
// /// Heap bound in bytes. The offset-guard pages are allocated after the
// /// bound.
// bound: u64,
// },
// }
2 changes: 1 addition & 1 deletion crates/cranelift/src/translate/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ mod translation_utils;

pub use self::environ::{FuncEnvironment, GlobalVariable, StructFieldsVec, TargetEnvironment};
pub use self::func_translator::FuncTranslator;
pub use self::heap::{Heap, HeapData, HeapStyle};
pub use self::heap::{Heap, HeapData};
pub use self::state::FuncTranslationState;
pub use self::table::{TableData, TableSize};
pub use self::translation_utils::*;
53 changes: 18 additions & 35 deletions crates/environ/src/compile/module_environ.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use crate::module::{
FuncRefIndex, Initializer, MemoryInitialization, MemoryInitializer, MemoryPlan, Module,
TablePlan, TableSegment, TableSegmentElements,
FuncRefIndex, Initializer, MemoryInitialization, MemoryInitializer, Module, TableSegment,
TableSegmentElements,
};
use crate::prelude::*;
use crate::{
Expand Down Expand Up @@ -343,13 +343,12 @@ impl<'a, 'data> ModuleEnvironment<'a, 'data> {
Payload::TableSection(tables) => {
self.validator.table_section(&tables)?;
let cnt = usize::try_from(tables.count()).unwrap();
self.result.module.table_plans.reserve_exact(cnt);
self.result.module.tables.reserve_exact(cnt);

for entry in tables {
let wasmparser::Table { ty, init } = entry?;
let table = self.convert_table_type(&ty)?;
let plan = TablePlan::for_table(table, &self.tunables);
self.result.module.table_plans.push(plan);
self.result.module.tables.push(table);
let init = match init {
wasmparser::TableInit::RefNull => TableInitialValue::Null {
precomputed: Vec::new(),
Expand All @@ -374,12 +373,11 @@ impl<'a, 'data> ModuleEnvironment<'a, 'data> {
self.validator.memory_section(&memories)?;

let cnt = usize::try_from(memories.count()).unwrap();
self.result.module.memory_plans.reserve_exact(cnt);
self.result.module.memories.reserve_exact(cnt);

for entry in memories {
let memory = entry?;
let plan = MemoryPlan::for_memory(memory.into(), &self.tunables);
self.result.module.memory_plans.push(plan);
self.result.module.memories.push(memory.into());
}
}

Expand Down Expand Up @@ -767,14 +765,8 @@ and for re-adding support for interface types you can see this issue:
self.flag_func_escaped(func_index);
func_index
}),
EntityType::Table(ty) => {
let plan = TablePlan::for_table(ty, &self.tunables);
EntityIndex::Table(self.result.module.table_plans.push(plan))
}
EntityType::Memory(ty) => {
let plan = MemoryPlan::for_memory(ty, &self.tunables);
EntityIndex::Memory(self.result.module.memory_plans.push(plan))
}
EntityType::Table(ty) => EntityIndex::Table(self.result.module.tables.push(ty)),
EntityType::Memory(ty) => EntityIndex::Memory(self.result.module.memories.push(ty)),
EntityType::Global(ty) => EntityIndex::Global(self.result.module.globals.push(ty)),
EntityType::Tag(_) => unimplemented!(),
}
Expand Down Expand Up @@ -924,8 +916,8 @@ impl ModuleTranslation<'_> {
// wasm module.
segments: Vec<(usize, StaticMemoryInitializer)>,
}
let mut info = PrimaryMap::with_capacity(self.module.memory_plans.len());
for _ in 0..self.module.memory_plans.len() {
let mut info = PrimaryMap::with_capacity(self.module.memories.len());
for _ in 0..self.module.memories.len() {
info.push(Memory {
data_size: 0,
min_addr: u64::MAX,
Expand All @@ -944,16 +936,11 @@ impl ModuleTranslation<'_> {
&mut self,
memory_index: MemoryIndex,
) -> Result<u64, SizeOverflow> {
self.module.memory_plans[memory_index]
.memory
.minimum_byte_size()
self.module.memories[memory_index].minimum_byte_size()
}

fn eval_offset(&mut self, memory_index: MemoryIndex, expr: &ConstExpr) -> Option<u64> {
match (
expr.ops(),
self.module.memory_plans[memory_index].memory.idx_type,
) {
match (expr.ops(), self.module.memories[memory_index].idx_type) {
(&[ConstOp::I32Const(offset)], IndexType::I32) => {
Some(offset.unsigned().into())
}
Expand Down Expand Up @@ -1006,7 +993,7 @@ impl ModuleTranslation<'_> {
// initializer can be created. This can be handled technically but
// would require some more changes to help fix the assert elsewhere
// that this protects against.
if self.module.memory_plans[i].memory.page_size() < page_size {
if self.module.memories[i].page_size() < page_size {
return;
}

Expand Down Expand Up @@ -1141,19 +1128,19 @@ impl ModuleTranslation<'_> {

// First convert any element-initialized tables to images of just that
// single function if the minimum size of the table allows doing so.
for ((_, init), (_, plan)) in self
for ((_, init), (_, table)) in self
.module
.table_initialization
.initial_values
.iter_mut()
.zip(
self.module
.table_plans
.tables
.iter()
.skip(self.module.num_imported_tables),
)
{
let table_size = plan.table.limits.min;
let table_size = table.limits.min;
if table_size > MAX_FUNC_TABLE_SIZE {
continue;
}
Expand Down Expand Up @@ -1206,16 +1193,12 @@ impl ModuleTranslation<'_> {
Some(top) => top,
None => break,
};
let table_size = self.module.table_plans[segment.table_index]
.table
.limits
.min;
let table_size = self.module.tables[segment.table_index].limits.min;
if top > table_size || top > MAX_FUNC_TABLE_SIZE {
break;
}

match self.module.table_plans[segment.table_index]
.table
match self.module.tables[segment.table_index]
.ref_type
.heap_type
.top()
Expand Down
4 changes: 2 additions & 2 deletions crates/environ/src/component/translate/inline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -966,8 +966,8 @@ impl<'a> Inliner<'a> {
Some(memory) => match &self.runtime_instances[memory.instance] {
InstanceModule::Static(idx) => match &memory.item {
ExportItem::Index(i) => {
let plan = &self.nested_modules[*idx].module.memory_plans[*i];
match plan.memory.idx_type {
let memory = &self.nested_modules[*idx].module.memories[*i];
match memory.idx_type {
IndexType::I32 => false,
IndexType::I64 => true,
}
Expand Down
Loading

0 comments on commit 61b4773

Please sign in to comment.