Skip to content

Commit

Permalink
Plumb wasm memory types further down in Winch
Browse files Browse the repository at this point in the history
This commit plumbs `wasmtime_environ::Memory` further down the stack in
Winch to where heaps are processed. This avoids an extra layer of
indirection through a `Heap` type which peels apart a `Memory` to pick
out a few fields. In the future this'll be used with more helpers on
`Memory` to simplify the static/dynamic memory cases.
  • Loading branch information
alexcrichton committed Nov 1, 2024
1 parent d2a19e0 commit 6675311
Show file tree
Hide file tree
Showing 7 changed files with 64 additions and 99 deletions.
13 changes: 13 additions & 0 deletions crates/environ/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1756,6 +1756,19 @@ impl Memory {
IndexType::I32 => WASM32_MAX_SIZE,
}
}

/// Returns the fixed byte size of this heap at runtime, if one exists.
///
/// A static size is only known when the memory's minimum and maximum
/// byte sizes are both computable and equal to one another; otherwise
/// `None` is returned.
pub fn static_heap_size(&self) -> Option<u64> {
    match (self.minimum_byte_size(), self.maximum_byte_size()) {
        // A heap whose lower and upper bounds coincide can never grow,
        // so its size is a compile-time-known constant.
        (Ok(min), Ok(max)) if min == max => Some(min),
        _ => None,
    }
}
}

#[derive(Copy, Clone, Debug)]
Expand Down
15 changes: 7 additions & 8 deletions winch/codegen/src/codegen/bounds.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
//! Exposes heap bounds checks functionality for WebAssembly.
//! Bounds checks in WebAssembly are critical for safety, so extreme caution is
//! recommended when working on this area of Winch.
use super::env::{HeapData, HeapStyle};
use super::env::HeapData;
use crate::{
abi::{scratch, vmctx},
codegen::CodeGenContext,
isa::reg::{writable, Reg},
masm::{IntCmpKind, MacroAssembler, OperandSize, RegImm, TrapCode},
stack::TypedReg,
};
use wasmtime_environ::Signed;

/// A newtype to represent an immediate offset argument for a heap access.
#[derive(Debug, Copy, Clone)]
Expand Down Expand Up @@ -90,12 +91,11 @@ where
M: MacroAssembler,
{
let dst = context.any_gpr(masm);
match (heap.max_size, &heap.style) {
match heap.memory.static_heap_size() {
// Constant size, no need to perform a load.
(Some(max_size), HeapStyle::Dynamic) if heap.min_size == max_size => {
masm.mov(writable!(dst), RegImm::i64(max_size as i64), ptr_size)
}
(_, HeapStyle::Dynamic) => {
Some(size) => masm.mov(writable!(dst), RegImm::i64(size.signed()), ptr_size),

None => {
let scratch = scratch!(M);
let base = if let Some(offset) = heap.import_from {
let addr = masm.address_at_vmctx(offset);
Expand All @@ -107,10 +107,9 @@ where
let addr = masm.address_at_reg(base, heap.current_length_offset);
masm.load_ptr(addr, writable!(dst));
}
(_, HeapStyle::Static { .. }) => unreachable!("Loading dynamic bounds of a static heap"),
}

Bounds::from_typed_reg(TypedReg::new(heap.ty, dst))
Bounds::from_typed_reg(TypedReg::new(heap.index_type(), dst))
}

/// This function ensures the following:
Expand Down
81 changes: 15 additions & 66 deletions winch/codegen/src/codegen/env.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ use std::collections::{
use std::mem;
use wasmparser::BlockType;
use wasmtime_environ::{
BuiltinFunctionIndex, FuncIndex, GlobalIndex, Memory, MemoryIndex, MemoryStyle,
ModuleTranslation, ModuleTypesBuilder, PrimaryMap, PtrSize, Table, TableIndex, Tunables,
TypeConvert, TypeIndex, VMOffsets, WasmHeapType, WasmValType,
BuiltinFunctionIndex, FuncIndex, GlobalIndex, IndexType, Memory, MemoryIndex,
ModuleTranslation, ModuleTypesBuilder, PrimaryMap, PtrSize, Table, TableIndex, TypeConvert,
TypeIndex, VMOffsets, WasmHeapType, WasmValType,
};

#[derive(Debug, Clone, Copy)]
Expand Down Expand Up @@ -42,20 +42,6 @@ pub struct TableData {
pub(crate) current_elements_size: OperandSize,
}

/// Style of the heap.
#[derive(Debug, Copy, Clone)]
pub enum HeapStyle {
/// Static heap, which has a fixed address.
Static {
/// The heap bound in bytes, not including the bytes for the offset
/// guard pages.
bound: u64,
},
/// Dynamic heap, which can be relocated to a different address when grown.
/// The bounds are calculated at runtime on every access.
Dynamic,
}

/// Heap metadata.
///
/// Heaps represent a WebAssembly linear memory.
Expand All @@ -71,19 +57,17 @@ pub struct HeapData {
/// If the WebAssembly memory is imported, this field contains the offset to locate the
/// base of the heap.
pub import_from: Option<u32>,
/// The memory type (32 or 64).
pub ty: WasmValType,
/// The style of the heap.
pub style: HeapStyle,
/// The guaranteed minimum size, in bytes.
pub min_size: u64,
/// The maximum heap size in bytes.
pub max_size: Option<u64>,
/// The log2 of this memory's page size, in bytes.
///
/// By default the page size is 64KiB (0x10000; 2**16; 1<<16; 65536) but the
/// custom-page-sizes proposal allows opting into a page size of `1`.
pub page_size_log2: u8,
/// The memory type this heap is associated with.
pub memory: Memory,
}

impl HeapData {
    /// Returns the WebAssembly value type used to index this heap
    /// (`i32` for 32-bit memories, `i64` for 64-bit memories).
    pub fn index_type(&self) -> WasmValType {
        if let IndexType::I64 = self.memory.idx_type {
            WasmValType::I64
        } else {
            WasmValType::I32
        }
    }
}

/// A function callee.
Expand Down Expand Up @@ -114,8 +98,6 @@ pub struct FuncEnv<'a, 'translation: 'a, 'data: 'translation, P: PtrSize> {
pub types: &'translation ModuleTypesBuilder,
/// The built-in functions available to the JIT code.
pub builtins: &'translation mut BuiltinFunctions,
/// Configurable code generation options.
tunables: &'translation Tunables,
/// Track resolved table information.
resolved_tables: HashMap<TableIndex, TableData>,
/// Track resolved heap information.
Expand Down Expand Up @@ -151,15 +133,13 @@ impl<'a, 'translation, 'data, P: PtrSize> FuncEnv<'a, 'translation, 'data, P> {
translation: &'translation ModuleTranslation<'data>,
types: &'translation ModuleTypesBuilder,
builtins: &'translation mut BuiltinFunctions,
tunables: &'translation Tunables,
isa: &dyn TargetIsa,
ptr_type: WasmValType,
) -> Self {
Self {
vmoffsets,
translation,
types,
tunables,
resolved_tables: HashMap::new(),
resolved_heaps: HashMap::new(),
resolved_callees: HashMap::new(),
Expand Down Expand Up @@ -292,21 +272,12 @@ impl<'a, 'translation, 'data, P: PtrSize> FuncEnv<'a, 'translation, 'data, P> {
};

let memory = &self.translation.module.memories[index];
let (min_size, max_size) = heap_limits(memory);
let style = heap_style_and_offset_guard_size(memory, self.tunables);

*entry.insert(HeapData {
offset: base_offset,
import_from,
current_length_offset,
style,
ty: match memory.idx_type {
wasmtime_environ::IndexType::I32 => WasmValType::I32,
wasmtime_environ::IndexType::I64 => WasmValType::I64,
},
min_size,
max_size,
page_size_log2: memory.page_size_log2,
memory: *memory,
})
}
}
Expand Down Expand Up @@ -420,25 +391,3 @@ impl<'a, 'data> TypeConverter<'a, 'data> {
Self { translation, types }
}
}

/// Computes the [`HeapStyle`] for `memory` under the given `tunables` by
/// translating the environment's [`MemoryStyle`] decision.
fn heap_style_and_offset_guard_size(memory: &Memory, tunables: &Tunables) -> HeapStyle {
    match MemoryStyle::for_memory(*memory, tunables) {
        // A static memory reserves a fixed number of bytes up front; that
        // reservation (excluding guard pages) becomes the heap bound.
        MemoryStyle::Static { byte_reservation } => HeapStyle::Static {
            bound: byte_reservation,
        },
        // Dynamic memories may move when grown, so bounds are loaded at
        // runtime on each access instead.
        MemoryStyle::Dynamic { .. } => HeapStyle::Dynamic,
    }
}

/// Returns the `(minimum, maximum)` byte sizes of `memory`.
///
/// The minimum is clamped to `u64::MAX` when it is not representable, and
/// the maximum is `None` when absent or not computable.
fn heap_limits(memory: &Memory) -> (u64, Option<u64>) {
    (
        memory.minimum_byte_size().unwrap_or_else(|_| {
            // 2^64 as a minimum doesn't fit in a 64 bit integer.
            // So in this case, the minimum is clamped to u64::MAX.
            u64::MAX
        }),
        memory.maximum_byte_size().ok(),
    )
}
50 changes: 28 additions & 22 deletions winch/codegen/src/codegen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,19 @@ use crate::{
stack::TypedReg,
};
use anyhow::Result;
use cranelift_codegen::{
binemit::CodeOffset,
ir::{RelSourceLoc, SourceLoc},
};
use smallvec::SmallVec;
use wasmparser::{
BinaryReader, FuncValidator, MemArg, Operator, ValidatorResources, VisitOperator,
};
use wasmtime_cranelift::{TRAP_BAD_SIGNATURE, TRAP_TABLE_OUT_OF_BOUNDS};
use wasmtime_environ::{
GlobalIndex, MemoryIndex, PtrSize, TableIndex, Tunables, TypeIndex, WasmHeapType, WasmValType,
FUNCREF_MASK,
};

use cranelift_codegen::{
binemit::CodeOffset,
ir::{RelSourceLoc, SourceLoc},
GlobalIndex, MemoryIndex, MemoryStyle, PtrSize, TableIndex, Tunables, TypeIndex, WasmHeapType,
WasmValType, FUNCREF_MASK,
};
use wasmtime_cranelift::{TRAP_BAD_SIGNATURE, TRAP_TABLE_OUT_OF_BOUNDS};

mod context;
pub(crate) use context::*;
Expand Down Expand Up @@ -585,19 +584,24 @@ where
let memory_index = MemoryIndex::from_u32(memarg.memory);
let heap = self.env.resolve_heap(memory_index);
let index = Index::from_typed_reg(self.context.pop_to_reg(self.masm, None));
let offset =
bounds::ensure_index_and_offset(self.masm, index, memarg.offset, heap.ty.into());
let offset = bounds::ensure_index_and_offset(
self.masm,
index,
memarg.offset,
heap.index_type().into(),
);
let offset_with_access_size = add_offset_and_access_size(offset, access_size);

let addr = match heap.style {
let style = MemoryStyle::for_memory(heap.memory, self.tunables);
let addr = match style {
// == Dynamic Heaps ==

// Account for the general case for dynamic memories. The access is
// out of bounds if:
// * index + offset + access_size overflows
// OR
// * index + offset + access_size > bound
HeapStyle::Dynamic => {
MemoryStyle::Dynamic { .. } => {
let bounds =
bounds::load_dynamic_heap_bounds(&mut self.context, self.masm, &heap, ptr_size);

Expand All @@ -621,7 +625,7 @@ where
self.masm.mov(
writable!(index_offset_and_access_size),
index_reg.into(),
heap.ty.into(),
heap.index_type().into(),
);
// Perform
// index = index + offset + access_size, trapping if the
Expand Down Expand Up @@ -675,7 +679,9 @@ where
// optimizing the work that the compiler has to do until the
// reachability is restored or when reaching the end of the
// function.
HeapStyle::Static { bound } if offset_with_access_size > bound => {
MemoryStyle::Static { byte_reservation }
if offset_with_access_size > byte_reservation =>
{
self.emit_fuel_increment();
self.masm.trap(TrapCode::HEAP_OUT_OF_BOUNDS);
self.context.reachable = false;
Expand Down Expand Up @@ -708,10 +714,10 @@ where
// * If the heap type is 32-bits, the offset is at most u32::MAX, so
// no adjustment is needed as part of
// [bounds::ensure_index_and_offset].
HeapStyle::Static { bound }
if heap.ty == WasmValType::I32
MemoryStyle::Static { byte_reservation }
if heap.index_type() == WasmValType::I32
&& u64::from(u32::MAX)
<= u64::from(bound) + u64::from(self.tunables.memory_guard_size)
<= byte_reservation + u64::from(self.tunables.memory_guard_size)
- offset_with_access_size =>
{
let addr = self.context.any_gpr(self.masm);
Expand All @@ -726,8 +732,8 @@ where
//
// bound - (offset + access_size) cannot wrap, because we already
// checked that (offset + access_size) > bound, above.
HeapStyle::Static { bound } => {
let bounds = Bounds::from_u64(bound);
MemoryStyle::Static { byte_reservation } => {
let bounds = Bounds::from_u64(byte_reservation);
let addr = bounds::load_heap_addr_checked(
self.masm,
&mut self.context,
Expand Down Expand Up @@ -908,14 +914,14 @@ where
.address_at_reg(base, heap_data.current_length_offset);
self.masm.load_ptr(size_addr, writable!(size_reg));
// Emit a shift to get the size in pages rather than in bytes.
let dst = TypedReg::new(heap_data.ty, size_reg);
let pow = heap_data.page_size_log2;
let dst = TypedReg::new(heap_data.index_type(), size_reg);
let pow = heap_data.memory.page_size_log2;
self.masm.shift_ir(
writable!(dst.reg),
pow as u64,
dst.into(),
ShiftKind::ShrU,
heap_data.ty.into(),
heap_data.index_type().into(),
);
self.context.stack.push(dst.into());
}
Expand Down
1 change: 0 additions & 1 deletion winch/codegen/src/isa/aarch64/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,6 @@ impl TargetIsa for Aarch64 {
translation,
types,
builtins,
tunables,
self,
abi::Aarch64ABI::ptr_type(),
);
Expand Down
1 change: 0 additions & 1 deletion winch/codegen/src/isa/x64/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,6 @@ impl TargetIsa for X64 {
translation,
types,
builtins,
tunables,
self,
abi::X64ABI::ptr_type(),
);
Expand Down
2 changes: 1 addition & 1 deletion winch/codegen/src/visitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1653,7 +1653,7 @@ where
// The memory32_grow builtin returns a pointer type, therefore we must
// ensure that the return type is representative of the address space of
// the heap type.
match (self.env.ptr_type(), heap.ty) {
match (self.env.ptr_type(), heap.index_type()) {
(WasmValType::I64, WasmValType::I64) => {}
// When the heap type is smaller than the pointer type, we adjust
// the result of the memory32_grow builtin.
Expand Down

0 comments on commit 6675311

Please sign in to comment.