diff --git a/crates/core/src/index_ty.rs b/crates/core/src/index_ty.rs index c6072147eb..d8b42d22e6 100644 --- a/crates/core/src/index_ty.rs +++ b/crates/core/src/index_ty.rs @@ -1,3 +1,5 @@ +use crate::ValType; + /// The index type used for addressing memories and tables. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum IndexType { @@ -8,6 +10,14 @@ pub enum IndexType { } impl IndexType { + /// Returns the [`ValType`] associated to `self`. + pub fn ty(&self) -> ValType { + match self { + IndexType::I32 => ValType::I32, + IndexType::I64 => ValType::I64, + } + } + /// Returns `true` if `self` is [`IndexType::I64`]. pub fn is_64(&self) -> bool { matches!(self, Self::I64) diff --git a/crates/core/src/typed.rs b/crates/core/src/typed.rs index 94ab475ff3..93669c0e39 100644 --- a/crates/core/src/typed.rs +++ b/crates/core/src/typed.rs @@ -16,6 +16,10 @@ macro_rules! impl_typed_for { } impl_typed_for! { bool => ValType::I32; + i8 => ValType::I32; + u8 => ValType::I32; + i16 => ValType::I32; + u16 => ValType::I32; i32 => ValType::I32; u32 => ValType::I32; i64 => ValType::I64; diff --git a/crates/wasmi/Cargo.toml b/crates/wasmi/Cargo.toml index eab447b783..09f68083ea 100644 --- a/crates/wasmi/Cargo.toml +++ b/crates/wasmi/Cargo.toml @@ -56,6 +56,7 @@ prefer-btree-collections = [ ] wat = ["dep:wat", "std"] simd = ["wasmi_core/simd", "wasmi_ir/simd", "wasmparser/simd"] +experimental-translator = [] # Enables extra checks performed during Wasmi bytecode execution. # diff --git a/crates/wasmi/src/engine/block_type.rs b/crates/wasmi/src/engine/block_type.rs index 285a661449..284bcd1b6b 100644 --- a/crates/wasmi/src/engine/block_type.rs +++ b/crates/wasmi/src/engine/block_type.rs @@ -1,3 +1,5 @@ +#![cfg_attr(not(feature = "experimental-translator"), allow(dead_code))] // TODO: remove + use crate::{ core::ValType, engine::DedupFuncType, @@ -18,7 +20,7 @@ pub enum BlockTypeInner { /// A block type with no parameters and no results. Empty, /// A block type with no parameters and exactly one result. - Returns, + Returns(ValType), /// A general block type with parameters and results. FuncType(DedupFuncType), } @@ -54,8 +56,8 @@ impl BlockType { } /// Creates a [`BlockType`] with no parameters and a single result type. - fn returns(_return_type: ValType) -> Self { - Self::from_inner(BlockTypeInner::Returns) + fn returns(return_type: ValType) -> Self { + Self::from_inner(BlockTypeInner::Returns(return_type)) } /// Creates a [`BlockType`] with parameters and results. @@ -66,7 +68,7 @@ impl BlockType { /// Returns the number of parameters of the [`BlockType`]. pub fn len_params(&self, engine: &Engine) -> u16 { match &self.inner { - BlockTypeInner::Empty | BlockTypeInner::Returns => 0, + BlockTypeInner::Empty | BlockTypeInner::Returns(_) => 0, BlockTypeInner::FuncType(func_type) => { engine.resolve_func_type(func_type, FuncType::len_params) } @@ -77,10 +79,19 @@ impl BlockType { pub fn len_results(&self, engine: &Engine) -> u16 { match &self.inner { BlockTypeInner::Empty => 0, - BlockTypeInner::Returns => 1, + BlockTypeInner::Returns(_) => 1, BlockTypeInner::FuncType(func_type) => { engine.resolve_func_type(func_type, FuncType::len_results) } } } + + /// Applies `f` to `self`'s [`FuncType`] and returns the result. 
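+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch of intended usage; the bindings `block_ty` and
+    /// `engine` are assumed for demonstration and are not part of this change:
+    ///
+    /// ```ignore
+    /// // Query the arity of a block type without cloning its `FuncType`.
+    /// let (num_params, num_results) = block_ty.func_type_with(&engine, |func_ty| {
+    ///     (func_ty.params().len(), func_ty.results().len())
+    /// });
+    /// ```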
+ pub fn func_type_with(&self, engine: &Engine, f: impl for<'a> FnOnce(&FuncType) -> R) -> R { + match &self.inner { + BlockTypeInner::Empty => f(&FuncType::new([], [])), + BlockTypeInner::Returns(return_type) => f(&FuncType::new([], [*return_type])), + BlockTypeInner::FuncType(func_type) => engine.resolve_func_type(func_type, f), + } + } } diff --git a/crates/wasmi/src/engine/translator/error.rs b/crates/wasmi/src/engine/translator/error.rs index ca7598706d..b3ed065bec 100644 --- a/crates/wasmi/src/engine/translator/error.rs +++ b/crates/wasmi/src/engine/translator/error.rs @@ -30,6 +30,8 @@ pub enum TranslationError { TooManyFunctionResults, /// Tried to define a function with too many function parameters. TooManyFunctionParams, + /// Tried to define a function with too many local variables. + TooManyLocalVariables, /// The function failed to compiled lazily. LazyCompilationFailed, } @@ -99,6 +101,9 @@ impl Display for TranslationError { Self::TooManyFunctionParams => { write!(f, "encountered function with too many function parameters") } + Self::TooManyLocalVariables => { + write!(f, "encountered function with too many local variables") + } Self::LazyCompilationFailed => { write!( f, diff --git a/crates/wasmi/src/engine/translator/func2/instrs.rs b/crates/wasmi/src/engine/translator/func2/instrs.rs new file mode 100644 index 0000000000..717936c17f --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/instrs.rs @@ -0,0 +1,388 @@ +use super::{Reset, ReusableAllocations}; +use crate::{ + core::{FuelCostsProvider, ValType}, + engine::translator::{ + comparator::{ + CmpSelectFusion, + CompareResult as _, + TryIntoCmpSelectInstr as _, + UpdateBranchOffset as _, + }, + func2::{Operand, Stack, StackLayout, StackSpace}, + relink_result::RelinkResult, + utils::{BumpFuelConsumption as _, Instr, IsInstructionParameter as _}, + }, + ir::{BranchOffset, Instruction, Reg}, + module::ModuleHeader, + Engine, + Error, +}; +use alloc::vec::{self, Vec}; + +/// Creates and encodes the list of [`Instruction`]s for a function. +#[derive(Debug, Default)] +pub struct InstrEncoder { + /// The list of constructed instructions and their parameters. + instrs: Vec, + /// The fuel costs of instructions. + /// + /// This is `Some` if fuel metering is enabled, otherwise `None`. + fuel_costs: Option, + /// The last pushed non-parameter [`Instruction`]. + last_instr: Option, +} + +impl ReusableAllocations for InstrEncoder { + type Allocations = InstrEncoderAllocations; + + fn into_allocations(self) -> Self::Allocations { + Self::Allocations { + instrs: self.instrs, + } + } +} + +/// The reusable heap allocations of the [`InstrEncoder`]. +#[derive(Debug, Default)] +pub struct InstrEncoderAllocations { + /// The list of constructed instructions and their parameters. + instrs: Vec, +} + +impl Reset for InstrEncoderAllocations { + fn reset(&mut self) { + self.instrs.clear(); + } +} + +impl InstrEncoder { + /// Creates a new [`InstrEncoder`]. + pub fn new(engine: &Engine, alloc: InstrEncoderAllocations) -> Self { + let config = engine.config(); + let fuel_costs = config + .get_consume_fuel() + .then(|| config.fuel_costs()) + .cloned(); + Self { + instrs: alloc.instrs, + fuel_costs, + last_instr: None, + } + } + + /// Returns the next [`Instr`]. + #[must_use] + pub fn next_instr(&self) -> Instr { + Instr::from_usize(self.instrs.len()) + } + + /// Pushes an [`Instruction::ConsumeFuel`] instruction to `self`. + /// + /// # Note + /// + /// The pushes [`Instruction::ConsumeFuel`] is initialized with base fuel costs. 
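+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch; the `encoder` binding is assumed and fuel metering
+    /// is assumed to be enabled for the [`Engine`]:
+    ///
+    /// ```ignore
+    /// // Reserve the `ConsumeFuel` instruction of the enclosing block.
+    /// // Later translation steps charge it via `bump_fuel_consumption`.
+    /// let consume_fuel = encoder.push_consume_fuel_instr()?;
+    /// assert!(consume_fuel.is_some());
+    /// ```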
+ pub fn push_consume_fuel_instr(&mut self) -> Result, Error> { + let Some(fuel_costs) = &self.fuel_costs else { + return Ok(None); + }; + let base_costs = fuel_costs.base(); + let Ok(base_costs) = u32::try_from(base_costs) else { + panic!("out of bounds base fuel costs: {base_costs}"); + }; + let instr = self.push_instr_impl(Instruction::consume_fuel(base_costs))?; + Ok(Some(instr)) + } + + /// Pushes a non-parameter [`Instruction`] to the [`InstrEncoder`]. + /// + /// Returns an [`Instr`] that refers to the pushed [`Instruction`]. + pub fn push_instr( + &mut self, + instruction: Instruction, + consume_fuel: Option, + f: impl FnOnce(&FuelCostsProvider) -> u64, + ) -> Result { + self.bump_fuel_consumption(consume_fuel, f)?; + self.push_instr_impl(instruction) + } + + /// Pushes a non-parameter [`Instruction`] to the [`InstrEncoder`]. + fn push_instr_impl(&mut self, instruction: Instruction) -> Result { + debug_assert!( + !instruction.is_instruction_parameter(), + "parameter: {instruction:?}" + ); + let instr = self.next_instr(); + self.instrs.push(instruction); + self.last_instr = Some(instr); + Ok(instr) + } + + /// Replaces `instr` with `new_instr` in `self`. + /// + /// - Returns `Ok(true)` if replacement was successful. + /// - Returns `Ok(false)` if replacement was unsuccessful. + /// + /// # Panics (Debug) + /// + /// If `instr` or `new_instr` are [`Instruction`] parameters. + pub fn try_replace_instr( + &mut self, + instr: Instr, + new_instr: Instruction, + ) -> Result { + debug_assert!( + !new_instr.is_instruction_parameter(), + "parameter: {new_instr:?}" + ); + let Some(last_instr) = self.last_instr else { + return Ok(false); + }; + let replace = self.get_mut(instr); + debug_assert!(!replace.is_instruction_parameter(), "parameter: {instr:?}"); + if instr != last_instr { + return Ok(false); + } + *replace = new_instr; + Ok(true) + } + + /// Tries to replace the result of the last instruction with `new_result` if possible. + /// + /// # Note + /// + /// - `old_result`: + /// just required for additional safety to check if the last instruction + /// really is the source of the `local.set` or `local.tee`. + /// - `new_result`: + /// the new result which shall replace the `old_result`. + pub fn try_replace_result( + &mut self, + new_result: Reg, + old_result: Reg, + layout: &StackLayout, + module: &ModuleHeader, + ) -> Result { + if !matches!(layout.stack_space(new_result), StackSpace::Local) { + // Case: cannot replace result if `new_result` isn't a local. + return Ok(false); + } + let Some(last_instr) = self.last_instr else { + // Case: cannot replace result without last instruction. + return Ok(false); + }; + if !self + .get_mut(last_instr) + .relink_result(module, new_result, old_result)? + { + // Case: it was impossible to relink the result of `last_instr. + return Ok(false); + } + Ok(true) + } + + /// Tries to fuse a compare instruction with a Wasm `select` instruction. + /// + /// # Returns + /// + /// - Returns `Some` if fusion was successful. + /// - Returns `None` if fusion could not be applied. + pub fn try_fuse_select( + &mut self, + ty: ValType, + select_condition: Reg, + layout: &StackLayout, + stack: &mut Stack, + ) -> Result, Error> { + let Some(last_instr) = self.last_instr else { + // If there is no last instruction there is no comparison instruction to negate. + return Ok(None); + }; + let last_instruction = self.get(last_instr); + let Some(last_result) = last_instruction.compare_result() else { + // All negatable instructions have a single result register. 
+ return Ok(None); + }; + if matches!(layout.stack_space(last_result), StackSpace::Local) { + // The instruction stores its result into a local variable which + // is an observable side effect which we are not allowed to mutate. + return Ok(None); + } + if last_result != select_condition { + // The result of the last instruction and the select's `condition` + // are not equal thus indicating that we cannot fuse the instructions. + return Ok(None); + } + let CmpSelectFusion::Applied { + fused, + swap_operands, + } = last_instruction.try_into_cmp_select_instr(|| { + let select_result = stack.push_temp(ty, Some(last_instr))?; + let select_result = layout.temp_to_reg(select_result)?; + Ok(select_result) + })? + else { + return Ok(None); + }; + let last_instr = self.get_mut(last_instr); + *last_instr = fused; + Ok(Some(swap_operands)) + } + + /// Pushes an [`Instruction`] parameter to the [`InstrEncoder`]. + /// + /// The parameter is associated to the last pushed [`Instruction`]. + pub fn push_param(&mut self, instruction: Instruction) { + self.instrs.push(instruction); + } + + /// Returns a shared reference to the [`Instruction`] associated to [`Instr`]. + /// + /// # Panics + /// + /// If `instr` is out of bounds for `self`. + pub fn get(&self, instr: Instr) -> &Instruction { + &self.instrs[instr.into_usize()] + } + + /// Returns an exclusive reference to the [`Instruction`] associated to [`Instr`]. + /// + /// # Panics + /// + /// If `instr` is out of bounds for `self`. + fn get_mut(&mut self, instr: Instr) -> &mut Instruction { + &mut self.instrs[instr.into_usize()] + } + + /// Updates the branch offset of `instr` to `offset`. + /// + /// # Errors + /// + /// If the branch offset could not be updated for `instr`. + pub fn update_branch_offset( + &mut self, + instr: Instr, + offset: BranchOffset, + layout: &mut StackLayout, + ) -> Result<(), Error> { + self.get_mut(instr).update_branch_offset(layout, offset)?; + Ok(()) + } + + /// Bumps consumed fuel for [`Instruction::ConsumeFuel`] of `instr` by `delta`. + /// + /// # Errors + /// + /// If consumed fuel is out of bounds after this operation. + pub fn bump_fuel_consumption( + &mut self, + consume_fuel: Option, + f: impl FnOnce(&FuelCostsProvider) -> u64, + ) -> Result<(), Error> { + let (fuel_costs, consume_fuel) = match (&self.fuel_costs, consume_fuel) { + (None, None) => return Ok(()), + (Some(fuel_costs), Some(consume_fuel)) => (fuel_costs, consume_fuel), + _ => { + panic!( + "fuel metering state mismatch: fuel_costs: {:?}, fuel_instr: {:?}", + self.fuel_costs, consume_fuel, + ); + } + }; + let fuel_consumed = f(fuel_costs); + self.get_mut(consume_fuel) + .bump_fuel_consumption(fuel_consumed)?; + Ok(()) + } + + /// Encode the top-most `len` operands on the stack as register list. 
+ /// + /// # Note + /// + /// This is used for the following n-ary instructions: + /// + /// - [`Instruction::ReturnMany`] + /// - [`Instruction::CopyMany`] + /// - [`Instruction::CallInternal`] + /// - [`Instruction::CallImported`] + /// - [`Instruction::CallIndirect`] + /// - [`Instruction::ReturnCallInternal`] + /// - [`Instruction::ReturnCallImported`] + /// - [`Instruction::ReturnCallIndirect`] + pub fn encode_register_list( + &mut self, + operands: &[Operand], + layout: &mut StackLayout, + ) -> Result<(), Error> { + let mut remaining = operands; + let mut operand_to_reg = + |operand: &Operand| -> Result { layout.operand_to_reg(*operand) }; + let instr = loop { + match remaining { + [] => return Ok(()), + [v0] => { + let v0 = operand_to_reg(v0)?; + break Instruction::register(v0); + } + [v0, v1] => { + let v0 = operand_to_reg(v0)?; + let v1 = operand_to_reg(v1)?; + break Instruction::register2_ext(v0, v1); + } + [v0, v1, v2] => { + let v0 = operand_to_reg(v0)?; + let v1 = operand_to_reg(v1)?; + let v2 = operand_to_reg(v2)?; + break Instruction::register3_ext(v0, v1, v2); + } + [v0, v1, v2, rest @ ..] => { + let v0 = operand_to_reg(v0)?; + let v1 = operand_to_reg(v1)?; + let v2 = operand_to_reg(v2)?; + let instr = Instruction::register_list_ext(v0, v1, v2); + self.push_param(instr); + remaining = rest; + } + }; + }; + self.push_param(instr); + Ok(()) + } + + /// Returns an iterator yielding all [`Instruction`]s of the [`InstrEncoder`]. + /// + /// # Note + /// + /// The [`InstrEncoder`] will be empty after this operation. + pub fn drain(&mut self) -> InstrEncoderIter<'_> { + InstrEncoderIter { + iter: self.instrs.drain(..), + } + } + + /// Returns the last instruction of the [`InstrEncoder`] if any. + pub fn last_instr(&self) -> Option { + self.last_instr + } +} + +/// Iterator yielding all [`Instruction`]s of the [`InstrEncoder`]. +#[derive(Debug)] +pub struct InstrEncoderIter<'a> { + /// The underlying iterator. + iter: vec::Drain<'a, Instruction>, +} + +impl<'a> Iterator for InstrEncoderIter<'a> { + type Item = Instruction; + + fn next(&mut self) -> Option { + self.iter.next() + } +} + +impl ExactSizeIterator for InstrEncoderIter<'_> { + fn len(&self) -> usize { + self.iter.len() + } +} diff --git a/crates/wasmi/src/engine/translator/func2/layout/consts.rs b/crates/wasmi/src/engine/translator/func2/layout/consts.rs new file mode 100644 index 0000000000..1a0c28b595 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/layout/consts.rs @@ -0,0 +1,142 @@ +use super::Reset; +use crate::{core::UntypedVal, engine::TranslationError, ir::Reg, Error}; +use alloc::{ + collections::{btree_map, BTreeMap}, + vec::Vec, +}; +use core::{iter::Rev, slice::Iter as SliceIter}; + +/// A pool of deduplicated function local constant values. +/// +/// - Those constant values are identified by their associated [`Reg`]. +/// - All constant values are also deduplicated so that no duplicates +/// are stored in a [`ConstRegistry`]. This also means that deciding if two +/// [`Reg`] values refer to the equal constant values can be efficiently +/// done by comparing the [`Reg`] indices without resolving to their +/// underlying constant values. +#[derive(Debug, Default)] +pub struct ConstRegistry { + /// Mapping from constant [`UntypedVal`] values to [`Reg`] indices. + const2idx: BTreeMap, + /// Mapping from [`Reg`] indices to constant [`UntypedVal`] values. + idx2const: Vec, + /// The [`Reg`] index for the next allocated function local constant value. 
+ next_idx: i16, +} + +impl Reset for ConstRegistry { + fn reset(&mut self) { + self.const2idx.clear(); + self.idx2const.clear(); + self.next_idx = Self::first_index(); + } +} + +impl ConstRegistry { + /// The maximum index for [`Reg`] referring to function local constant values. + /// + /// # Note + /// + /// The maximum index is also the one to be assigned to the first allocated + /// function local constant value as indices are counting downwards. + fn first_index() -> i16 { + -1 + } + + /// The mininmum index for [`Reg`] referring to function local constant values. + /// + /// # Note + /// + /// This index is not assignable to a function local constant value and acts + /// as a bound to guard against overflowing the range of indices. + fn last_index() -> i16 { + i16::MIN + } + + /// Returns the number of allocated function local constant values. + pub fn len_consts(&self) -> u16 { + self.next_idx.abs_diff(Self::first_index()) + } + + /// Allocates a new constant `value` on the [`ConstRegistry`] and returns its identifier. + /// + /// # Note + /// + /// If the constant `value` already exists in this [`ConstRegistry`] no new value is + /// allocated and the identifier of the existing constant `value` returned instead. + /// + /// # Errors + /// + /// If too many constant values have been allocated for this [`ConstRegistry`]. + pub fn alloc(&mut self, value: UntypedVal) -> Result { + if self.next_idx == Self::last_index() { + return Err(Error::from(TranslationError::TooManyFuncLocalConstValues)); + } + match self.const2idx.entry(value) { + btree_map::Entry::Occupied(entry) => Ok(*entry.get()), + btree_map::Entry::Vacant(entry) => { + let register = Reg::from(self.next_idx); + self.next_idx -= 1; + entry.insert(register); + self.idx2const.push(value); + Ok(register) + } + } + } + + /// Returns the function local constant [`UntypedVal`] of the [`Reg`] if any. + pub fn get(&self, register: Reg) -> Option { + if !register.is_const() { + return None; + } + let index = i16::from(register).wrapping_add(1).unsigned_abs() as usize; + self.idx2const.get(index).copied() + } + + /// Returns an iterator yielding all function local constant values of the [`ConstRegistry`]. + /// + /// # Note + /// + /// The function local constant values are yielded in their allocation order. + pub fn iter(&self) -> ConstRegistryIter<'_> { + ConstRegistryIter::new(self) + } +} + +/// Iterator yielding all allocated function local constant values. +pub struct ConstRegistryIter<'a> { + /// The underlying iterator. + iter: Rev>, +} + +impl<'a> ConstRegistryIter<'a> { + /// Creates a new [`ConstRegistryIter`] from the given slice of [`UntypedVal`]. + pub fn new(consts: &'a ConstRegistry) -> Self { + // Note: we need to revert the iteration since we allocate new + // function local constants in reverse order of their absolute + // vector indices in the function call frame during execution. 
+ Self { + iter: consts.idx2const.as_slice().iter().rev(), + } + } +} + +impl Iterator for ConstRegistryIter<'_> { + type Item = UntypedVal; + + fn next(&mut self) -> Option { + self.iter.next().copied() + } +} + +impl DoubleEndedIterator for ConstRegistryIter<'_> { + fn next_back(&mut self) -> Option { + self.iter.next_back().copied() + } +} + +impl ExactSizeIterator for ConstRegistryIter<'_> { + fn len(&self) -> usize { + self.iter.len() + } +} diff --git a/crates/wasmi/src/engine/translator/func2/layout/mod.rs b/crates/wasmi/src/engine/translator/func2/layout/mod.rs new file mode 100644 index 0000000000..717b851788 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/layout/mod.rs @@ -0,0 +1,170 @@ +mod consts; + +use self::consts::{ConstRegistry, ConstRegistryIter}; +use super::{LocalIdx, Operand, OperandIdx, Reset}; +use crate::{ + core::{UntypedVal, ValType}, + engine::{translator::comparator::AllocConst, TranslationError}, + ir::Reg, + Error, +}; + +#[cfg(doc)] +use super::Stack; + +/// The layout of the [`Stack`]. +#[derive(Debug, Default)] +pub struct StackLayout { + /// The number of locals registered to the function. + len_locals: usize, + /// All function local constants. + consts: ConstRegistry, +} + +impl Reset for StackLayout { + fn reset(&mut self) { + self.len_locals = 0; + self.consts.reset(); + } +} + +impl StackLayout { + /// Register `amount` local variables of common type `ty`. + /// + /// # Errors + /// + /// If too many local variables are being registered. + pub fn register_locals(&mut self, amount: u32, _ty: ValType) -> Result<(), Error> { + self.len_locals += amount as usize; + Ok(()) + } + + /// Returns the [`StackSpace`] of the [`Reg`]. + /// + /// Returns `None` if the [`Reg`] is unknown to the [`Stack`]. + #[must_use] + pub fn stack_space(&self, reg: Reg) -> StackSpace { + let index = i16::from(reg); + if index.is_negative() { + return StackSpace::Const; + } + let index = index as u16; + if usize::from(index) < self.len_locals { + return StackSpace::Local; + } + StackSpace::Temp + } + + /// Converts the `operand` into the associated [`Reg`]. + /// + /// # Note + /// + /// Forwards to one of + /// + /// - [`StackLayout::local_to_reg`] + /// - [`StackLayout::temp_to_reg`] + /// - [`StackLayout::const_to_reg`] + /// + /// # Errors + /// + /// If the forwarded method returned an error. + pub fn operand_to_reg(&mut self, operand: Operand) -> Result { + match operand { + Operand::Local(operand) => self.local_to_reg(operand.local_index()), + Operand::Temp(operand) => self.temp_to_reg(operand.operand_index()), + Operand::Immediate(operand) => self.const_to_reg(operand.val()), + } + } + + /// Converts the local `index` into the associated [`Reg`]. + /// + /// # Errors + /// + /// If `index` cannot be converted into a [`Reg`]. + pub fn local_to_reg(&self, index: LocalIdx) -> Result { + debug_assert!( + (u32::from(index) as usize) < self.len_locals, + "out of bounds local operand index: {index:?}" + ); + let Ok(index) = i16::try_from(u32::from(index)) else { + return Err(Error::from(TranslationError::AllocatedTooManyRegisters)); + }; + Ok(Reg::from(index)) + } + + /// Converts the operand `index` into the associated [`Reg`]. + /// + /// # Errors + /// + /// If `index` cannot be converted into a [`Reg`]. 
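+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch; the `layout` binding is assumed to have 3
+    /// registered locals:
+    ///
+    /// ```ignore
+    /// // Temporary operands live directly above the locals, so temporary
+    /// // operand 0 maps to register 3, operand 1 to register 4, and so on.
+    /// assert_eq!(layout.temp_to_reg(OperandIdx::from(0_usize))?, Reg::from(3_i16));
+    /// ```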
+ pub fn temp_to_reg(&self, index: OperandIdx) -> Result { + let index = usize::from(index); + let Some(index) = index.checked_add(self.len_locals) else { + return Err(Error::from(TranslationError::AllocatedTooManyRegisters)); + }; + let Ok(index) = i16::try_from(index) else { + return Err(Error::from(TranslationError::AllocatedTooManyRegisters)); + }; + Ok(Reg::from(index)) + } + + /// Allocates a function local constant `value`. + /// + /// # Errors + /// + /// If too many function local constants have been allocated already. + pub fn const_to_reg(&mut self, value: impl Into) -> Result { + self.consts.alloc(value.into()) + } + + /// Returns an iterator yielding all function local constants. + /// + /// # Note + /// + /// The function local constant are yielded in reverse order of allocation. + pub fn consts(&self) -> ConstRegistryIter<'_> { + self.consts.iter() + } + + /// Returns the number of registered locals. + pub fn len_locals(&self) -> usize { + self.len_locals + } +} + +impl AllocConst for StackLayout { + fn alloc_const>(&mut self, value: T) -> Result { + self.const_to_reg(value) + } +} + +/// The [`StackSpace`] of a [`Reg`]. +#[derive(Debug, Copy, Clone)] +pub enum StackSpace { + /// Stack slot referring to a local variable. + Local, + /// Stack slot referring to a function local constant value. + Const, + /// Stack slot referring to a temporary stack operand. + Temp, +} + +impl StackSpace { + /// Returns `true` if `self` is [`StackSpace::Local`]. + #[inline] + pub fn is_local(self) -> bool { + matches!(self, Self::Local) + } + + /// Returns `true` if `self` is [`StackSpace::Temp`]. + #[inline] + pub fn is_temp(self) -> bool { + matches!(self, Self::Temp) + } + + /// Returns `true` if `self` is [`StackSpace::Const`]. + #[inline] + pub fn is_const(self) -> bool { + matches!(self, Self::Const) + } +} diff --git a/crates/wasmi/src/engine/translator/func2/mod.rs b/crates/wasmi/src/engine/translator/func2/mod.rs new file mode 100644 index 0000000000..78f98e6034 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/mod.rs @@ -0,0 +1,2578 @@ +#![expect(dead_code)] + +#[macro_use] +mod utils; +mod instrs; +mod layout; +mod op; +#[cfg(feature = "simd")] +mod simd; +mod stack; +mod visit; + +use self::{ + instrs::{InstrEncoder, InstrEncoderAllocations}, + layout::{StackLayout, StackSpace}, + stack::{ + BlockControlFrame, + ControlFrame, + ControlFrameBase, + ControlFrameKind, + ElseControlFrame, + ElseReachability, + IfControlFrame, + IfReachability, + ImmediateOperand, + LocalIdx, + LoopControlFrame, + Operand, + OperandIdx, + Stack, + StackAllocations, + TempOperand, + }, + utils::{Input, Input16, Input32, Reset, ReusableAllocations}, +}; +use crate::{ + core::{FuelCostsProvider, IndexType, TrapCode, Typed, TypedVal, UntypedVal, ValType}, + engine::{ + translator::{ + comparator::{ + CompareResult as _, + LogicalizeCmpInstr, + NegateCmpInstr, + TryIntoCmpBranchInstr as _, + }, + labels::{LabelRef, LabelRegistry}, + utils::{Instr, WasmFloat, WasmInteger, Wrap}, + WasmTranslator, + }, + BlockType, + CompiledFuncEntity, + TranslationError, + }, + ir::{ + index, + Address, + Address32, + AnyConst16, + BoundedRegSpan, + BranchOffset, + BranchOffset16, + Comparator, + ComparatorAndOffset, + Const16, + Const32, + FixedRegSpan, + Instruction, + IntoShiftAmount, + Offset16, + Offset64, + Offset64Lo, + Reg, + RegSpan, + Sign, + }, + module::{FuncIdx, FuncTypeIdx, MemoryIdx, ModuleHeader, TableIdx, WasmiValueType}, + Engine, + Error, + FuncType, +}; +use alloc::vec::Vec; +use core::mem; 
+use wasmparser::{MemArg, WasmFeatures}; + +/// Type concerned with translating from Wasm bytecode to Wasmi bytecode. +#[derive(Debug)] +pub struct FuncTranslator { + /// The reference to the Wasm module function under construction. + func: FuncIdx, + /// The engine for which the function is compiled. + /// + /// # Note + /// + /// Technically this is not needed since the information is redundant given via + /// the `module` field. However, this acts like a faster access since `module` + /// only holds a weak reference to the engine. + engine: Engine, + /// The immutable Wasmi module resources. + module: ModuleHeader, + /// This represents the reachability of the currently translated code. + /// + /// - `true`: The currently translated code is reachable. + /// - `false`: The currently translated code is unreachable and can be skipped. + /// + /// # Note + /// + /// Visiting the Wasm `Else` or `End` control flow operator resets + /// reachability to `true` again. + reachable: bool, + /// Fuel costs for fuel metering. + /// + /// `None` if fuel metering is disabled. + fuel_costs: Option, + /// Wasm value and control stack. + stack: Stack, + /// Wasm layout to map stack slots to Wasmi registers. + layout: StackLayout, + /// Registers and pins labels and tracks their users. + labels: LabelRegistry, + /// Constructs and encodes function instructions. + instrs: InstrEncoder, + /// Temporary buffer for operands. + operands: Vec, + /// Temporary buffer for immediate values. + immediates: Vec, +} + +/// Heap allocated data structured used by the [`FuncTranslator`]. +#[derive(Debug, Default)] +pub struct FuncTranslatorAllocations { + /// Wasm value and control stack. + stack: StackAllocations, + /// Wasm layout to map stack slots to Wasmi registers. + layout: StackLayout, + /// Registers and pins labels and tracks their users. + labels: LabelRegistry, + /// Constructs and encodes function instructions. + instrs: InstrEncoderAllocations, + /// Temporary buffer for operands. + operands: Vec, + /// Temporary buffer for immediate values. 
+ immediates: Vec, +} + +impl Reset for FuncTranslatorAllocations { + fn reset(&mut self) { + self.stack.reset(); + self.layout.reset(); + self.labels.reset(); + self.instrs.reset(); + self.operands.clear(); + self.immediates.clear(); + } +} + +impl WasmTranslator<'_> for FuncTranslator { + type Allocations = FuncTranslatorAllocations; + + fn setup(&mut self, _bytes: &[u8]) -> Result { + Ok(false) + } + + #[inline] + fn features(&self) -> WasmFeatures { + self.engine.config().wasm_features() + } + + fn translate_locals( + &mut self, + amount: u32, + value_type: wasmparser::ValType, + ) -> Result<(), Error> { + let ty = WasmiValueType::from(value_type).into_inner(); + self.stack.register_locals(amount, ty)?; + self.layout.register_locals(amount, ty)?; + Ok(()) + } + + fn finish_translate_locals(&mut self) -> Result<(), Error> { + Ok(()) + } + + fn update_pos(&mut self, _pos: usize) {} + + fn finish( + mut self, + finalize: impl FnOnce(CompiledFuncEntity), + ) -> Result { + let Some(frame_size) = self.frame_size() else { + return Err(Error::from(TranslationError::AllocatedTooManyRegisters)); + }; + self.update_branch_offsets()?; + finalize(CompiledFuncEntity::new( + frame_size, + self.instrs.drain(), + self.layout.consts(), + )); + Ok(self.into_allocations()) + } +} + +impl ReusableAllocations for FuncTranslator { + type Allocations = FuncTranslatorAllocations; + + fn into_allocations(self) -> Self::Allocations { + Self::Allocations { + stack: self.stack.into_allocations(), + layout: self.layout, + labels: self.labels, + instrs: self.instrs.into_allocations(), + operands: self.operands, + immediates: self.immediates, + } + } +} + +impl FuncTranslator { + /// Creates a new [`FuncTranslator`]. + pub fn new( + func: FuncIdx, + module: ModuleHeader, + alloc: FuncTranslatorAllocations, + ) -> Result { + let Some(engine) = module.engine().upgrade() else { + panic!( + "cannot compile function since engine does no longer exist: {:?}", + module.engine() + ) + }; + let config = engine.config(); + let fuel_costs = config + .get_consume_fuel() + .then(|| config.fuel_costs()) + .cloned(); + let FuncTranslatorAllocations { + stack, + layout, + labels, + instrs, + operands, + immediates, + } = alloc.into_reset(); + let stack = Stack::new(&engine, stack); + let instrs = InstrEncoder::new(&engine, instrs); + let mut translator = Self { + func, + engine, + module, + reachable: true, + fuel_costs, + stack, + layout, + labels, + instrs, + operands, + immediates, + }; + translator.init_func_body_block()?; + translator.init_func_params()?; + Ok(translator) + } + + /// Initializes the function body enclosing control block. + fn init_func_body_block(&mut self) -> Result<(), Error> { + let func_ty = self.module.get_type_of_func(self.func); + let block_ty = BlockType::func_type(func_ty); + let end_label = self.labels.new_label(); + let consume_fuel = self.instrs.push_consume_fuel_instr()?; + self.stack + .push_func_block(block_ty, end_label, consume_fuel)?; + Ok(()) + } + + /// Initializes the function's parameters. + fn init_func_params(&mut self) -> Result<(), Error> { + for ty in self.func_type().params() { + self.stack.register_locals(1, *ty)?; + self.layout.register_locals(1, *ty)?; + } + Ok(()) + } + + /// Returns the frame size of the to-be-compiled function. + /// + /// Returns `None` if the frame size is out of bounds. + fn frame_size(&self) -> Option { + let frame_size = self + .stack + .max_height() + .checked_add(self.layout.len_locals())? 
+ .checked_add(self.layout.consts().len())?; + u16::try_from(frame_size).ok() + } + + /// Updates the branch offsets of all branch instructions inplace. + /// + /// # Panics + /// + /// If this is used before all branching labels have been pinned. + fn update_branch_offsets(&mut self) -> Result<(), Error> { + for (user, offset) in self.labels.resolved_users() { + self.instrs + .update_branch_offset(user, offset?, &mut self.layout)?; + } + Ok(()) + } + + /// Returns the [`FuncType`] of the function that is currently translated. + fn func_type(&self) -> FuncType { + self.func_type_with(FuncType::clone) + } + + /// Applies `f` to the [`FuncType`] of the function that is currently translated. + fn func_type_with(&self, f: impl FnOnce(&FuncType) -> R) -> R { + self.resolve_func_type_with(self.func, f) + } + + /// Returns the [`FuncType`] of the function at `func_index`. + fn resolve_func_type(&self, func_index: FuncIdx) -> FuncType { + self.resolve_func_type_with(func_index, FuncType::clone) + } + + /// Applies `f` to the [`FuncType`] of the function at `func_index`. + fn resolve_func_type_with(&self, func_index: FuncIdx, f: impl FnOnce(&FuncType) -> R) -> R { + let dedup_func_type = self.module.get_type_of_func(func_index); + self.engine().resolve_func_type(dedup_func_type, f) + } + + /// Resolves the [`FuncType`] at the given Wasm module `type_index`. + fn resolve_type(&self, type_index: u32) -> FuncType { + let func_type_idx = FuncTypeIdx::from(type_index); + let dedup_func_type = self.module.get_func_type(func_type_idx); + self.engine() + .resolve_func_type(dedup_func_type, Clone::clone) + } + + /// Returns the [`RegSpan`] of a call instruction before manipulating the operand stack. + fn call_regspan(&self, len_params: usize) -> Result { + let height = self.stack.height(); + let Some(start) = height.checked_sub(len_params) else { + panic!("operand stack underflow while evaluating call `RegSpan`"); + }; + let start = self.layout.temp_to_reg(OperandIdx::from(start))?; + Ok(RegSpan::new(start)) + } + + /// Push `results` as [`TempOperand`] onto the [`Stack`] tagged to `instr`. + /// + /// Returns the [`RegSpan`] identifying the pushed operands if any. + fn push_results( + &mut self, + instr: Instr, + results: &[ValType], + ) -> Result, Error> { + let (first, rest) = match results.split_first() { + Some((first, rest)) => (first, rest), + None => return Ok(None), + }; + let first = self.stack.push_temp(*first, Some(instr))?; + for result in rest { + self.stack.push_temp(*result, Some(instr))?; + } + let start = self.layout.temp_to_reg(first)?; + Ok(Some(RegSpan::new(start))) + } + + /// Returns the [`Engine`] for which the function is compiled. + fn engine(&self) -> &Engine { + &self.engine + } + + /// Returns `true` if fuel metering is enabled. + fn is_fuel_metering_enabled(&self) -> bool { + self.fuel_costs.is_some() + } + + /// Copy the top-most `len` operands to [`Operand::Temp`] values. + /// + /// # Note + /// + /// - The top-most `len` operands on the [`Stack`] will be [`Operand::Temp`] upon completion. + /// - Does nothing if an [`Operand`] is already an [`Operand::Temp`]. + fn move_operands_to_temp( + &mut self, + len: usize, + consume_fuel: Option, + ) -> Result<(), Error> { + for n in 0..len { + let operand = self.stack.operand_to_temp(n); + self.copy_operand_to_temp(operand, consume_fuel)?; + } + Ok(()) + } + + /// Convert all branch params up to `depth` to [`Operand::Temp`]. 
+ /// + /// # Note + /// + /// - The top-most `depth` operands on the [`Stack`] will be [`Operand::Temp`] upon completion. + /// - Does nothing if an [`Operand`] is already an [`Operand::Temp`]. + fn copy_branch_params( + &mut self, + target: &impl ControlFrameBase, + consume_fuel_instr: Option, + ) -> Result<(), Error> { + let len_branch_params = target.len_branch_params(&self.engine); + let Some(branch_results) = self.frame_results(target)? else { + return Ok(()); + }; + self.encode_copies(branch_results, len_branch_params, consume_fuel_instr)?; + Ok(()) + } + + /// Pushes the temporary results of the control `frame` onto the [`Stack`]. + /// + /// # Note + /// + /// - Before pushing the results, the [`Stack`] is truncated to the `frame`'s height. + /// - Not all control frames have temporary results, e.g. Wasm `loop`s, Wasm `if`s with + /// a compile-time known branch or Wasm `block`s that are never branched to, do not + /// require to call this function. + fn push_frame_results(&mut self, frame: &impl ControlFrameBase) -> Result<(), Error> { + let height = frame.height(); + self.stack.trunc(height); + frame + .ty() + .func_type_with(&self.engine, |func_ty| -> Result<(), Error> { + for result in func_ty.results() { + self.stack.push_temp(*result, None)?; + } + Ok(()) + })?; + Ok(()) + } + + /// Encodes a copy instruction for the top-most `len_values` on the stack to `results`. + /// + /// # Note + /// + /// - This does _not_ pop values from the stack or manipulate the stack otherwise. + /// - This might allocate new function local constant values if necessary. + /// - This does _not_ encode a copy if the copy is a no-op. + fn encode_copies( + &mut self, + results: RegSpan, + len_values: u16, + consume_fuel_instr: Option, + ) -> Result<(), Error> { + match len_values { + 0 => Ok(()), + 1 => { + let result = results.head(); + let copy_instr = match self.stack.peek(0) { + Operand::Immediate(operand) => match operand.ty() { + ValType::I32 => { + let value = i32::from(operand.val()); + Instruction::copy_imm32(result, value) + } + ValType::I64 => { + let value = i64::from(operand.val()); + match Const32::try_from(value) { + Ok(value) => Instruction::copy_i64imm32(result, value), + Err(_) => { + let value = self.layout.const_to_reg(value)?; + Instruction::copy(result, value) + } + } + } + ValType::F32 => { + let value = f32::from(operand.val()); + Instruction::copy_imm32(result, value) + } + ValType::F64 => { + let value = f64::from(operand.val()); + match Const32::try_from(value) { + Ok(value) => Instruction::copy_f64imm32(result, value), + Err(_) => { + let value = self.layout.const_to_reg(value)?; + Instruction::copy(result, value) + } + } + } + ValType::V128 | ValType::FuncRef | ValType::ExternRef => { + let value = self.layout.const_to_reg(operand.val())?; + Instruction::copy(result, value) + } + }, + operand => { + let value = self.layout.operand_to_reg(operand)?; + if result == value { + // Case: no-op copy + return Ok(()); + } + Instruction::copy(result, value) + } + }; + self.instrs + .push_instr(copy_instr, consume_fuel_instr, FuelCostsProvider::base)?; + Ok(()) + } + 2 => { + let (fst, snd) = self.stack.peek2(); + let fst = self.layout.operand_to_reg(fst)?; + let snd = self.layout.operand_to_reg(snd)?; + self.instrs.push_instr( + Instruction::copy2_ext(results, fst, snd), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + Ok(()) + } + _ => { + self.instrs + .bump_fuel_consumption(consume_fuel_instr, |costs| { + costs.fuel_for_copying_values(u64::from(len_values)) + })?; + if 
let Some(values) = self.try_form_regspan(usize::from(len_values))? { + // Case: can encode the copies as a more efficient `copy_span` + if results == values { + // Case: results and values are equal and therefore the copy is a no-op + return Ok(()); + } + debug_assert!(results.head() < values.head()); + self.instrs.push_instr( + Instruction::copy_span(results, values, len_values), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + return Ok(()); + } + self.stack + .peek_n(usize::from(len_values), &mut self.operands); + let [fst, snd, rest @ ..] = &self.operands[..] else { + unreachable!("asserted that operands.len() >= 3") + }; + let fst = self.layout.operand_to_reg(*fst)?; + let snd = self.layout.operand_to_reg(*snd)?; + self.instrs.push_instr( + Instruction::copy_many_ext(results, fst, snd), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + self.instrs.encode_register_list(rest, &mut self.layout)?; + Ok(()) + } + } + } + + /// Returns the results [`RegSpan`] of the `frame` if any. + fn frame_results(&self, frame: &impl ControlFrameBase) -> Result, Error> { + Self::frame_results_impl(frame, &self.engine, &self.layout) + } + + /// Returns the results [`RegSpan`] of the `frame` if any. + fn frame_results_impl( + frame: &impl ControlFrameBase, + engine: &Engine, + layout: &StackLayout, + ) -> Result, Error> { + if frame.len_branch_params(engine) == 0 { + return Ok(None); + } + let height = frame.height(); + let start = layout.temp_to_reg(OperandIdx::from(height))?; + let span = RegSpan::new(start); + Ok(Some(span)) + } + + /// Returns `true` if the [`ControlFrame`] at `depth` requires copying for its branch parameters. + /// + /// # Note + /// + /// Some instructions can be encoded in a more efficient way if no branch parameter copies are required. + fn requires_branch_param_copies(&self, depth: usize) -> bool { + let frame = self.stack.peek_control(depth); + let len_branch_params = usize::from(frame.len_branch_params(&self.engine)); + let frame_height = frame.height(); + let height_matches = frame_height == (self.stack.height() - len_branch_params); + let only_temps = (0..len_branch_params) + .map(|depth| self.stack.peek(depth)) + .all(|o| o.is_temp()); + let can_avoid_copies = height_matches && only_temps; + !can_avoid_copies + } + + /// Pins the `label` to the next [`Instr`]. + fn pin_label(&mut self, label: LabelRef) { + self.labels + .pin_label(label, self.instrs.next_instr()) + .unwrap_or_else(|err| panic!("failed to pin label to next instruction: {err}")); + } + + /// Convert the [`Operand`] at `depth` into an [`Operand::Temp`] by copying if necessary. + /// + /// # Note + /// + /// Does nothing if the [`Operand`] is already an [`Operand::Temp`]. + fn copy_operand_to_temp( + &mut self, + operand: Operand, + consume_fuel: Option, + ) -> Result<(), Error> { + let instr = match operand { + Operand::Temp(_) => return Ok(()), + Operand::Local(operand) => { + let result = self.layout.temp_to_reg(operand.operand_index())?; + let value = self.layout.local_to_reg(operand.local_index())?; + Instruction::copy(result, value) + } + Operand::Immediate(operand) => { + let result = self.layout.temp_to_reg(operand.operand_index())?; + self.make_copy_imm_instr(result, operand.val())? + } + }; + self.instrs + .push_instr(instr, consume_fuel, FuelCostsProvider::base)?; + Ok(()) + } + + /// Returns the copy instruction to copy the given immediate `value`. 
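+    ///
+    /// # Note
+    ///
+    /// Immediates are encoded inline where possible and otherwise spilled to a
+    /// function local constant. For example (illustrative, based on the match arms
+    /// below): a small value such as `42_i64` becomes `Instruction::copy_i64imm32`,
+    /// whereas a value that does not fit into 32 bits, such as `i64::MAX`, is
+    /// allocated via `const_to_reg` and copied with a plain `Instruction::copy`.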
+ fn make_copy_imm_instr(&mut self, result: Reg, value: TypedVal) -> Result { + let instr = match value.ty() { + ValType::I32 => Instruction::copy_imm32(result, i32::from(value)), + ValType::I64 => { + let value = i64::from(value); + match >::try_from(value) { + Ok(value) => Instruction::copy_i64imm32(result, value), + Err(_) => { + let value = self.layout.const_to_reg(value)?; + Instruction::copy(result, value) + } + } + } + ValType::F32 => Instruction::copy_imm32(result, f32::from(value)), + ValType::F64 => { + let value = f64::from(value); + match >::try_from(value) { + Ok(value) => Instruction::copy_f64imm32(result, value), + Err(_) => { + let value = self.layout.const_to_reg(value)?; + Instruction::copy(result, value) + } + } + } + ValType::V128 | ValType::FuncRef | ValType::ExternRef => { + let value = self.layout.const_to_reg(value)?; + Instruction::copy(result, value) + } + }; + Ok(instr) + } + + /// Pushes the `instr` to the function with the associated `fuel_costs`. + fn push_instr( + &mut self, + instr: Instruction, + fuel_costs: impl FnOnce(&FuelCostsProvider) -> u64, + ) -> Result { + let consume_fuel = self.stack.consume_fuel_instr(); + let instr = self.instrs.push_instr(instr, consume_fuel, fuel_costs)?; + Ok(instr) + } + + /// Pushes the `instr` to the function with the associated `fuel_costs`. + fn push_instr_with_result( + &mut self, + result_ty: ValType, + make_instr: impl FnOnce(Reg) -> Instruction, + fuel_costs: impl FnOnce(&FuelCostsProvider) -> u64, + ) -> Result<(), Error> { + let consume_fuel_instr = self.stack.consume_fuel_instr(); + let expected_iidx = self.instrs.next_instr(); + let result = self + .layout + .temp_to_reg(self.stack.push_temp(result_ty, Some(expected_iidx))?)?; + let actual_iidx = + self.instrs + .push_instr(make_instr(result), consume_fuel_instr, fuel_costs)?; + assert_eq!(expected_iidx, actual_iidx); + Ok(()) + } + + /// Pushes a binary instruction with a result and associated fuel costs. + fn push_binary_instr_with_result( + &mut self, + result_ty: ValType, + lhs: Operand, + rhs: Operand, + make_instr: impl FnOnce(Reg, Reg, Reg) -> Instruction, + fuel_costs: impl FnOnce(&FuelCostsProvider) -> u64, + ) -> Result<(), Error> { + debug_assert_eq!(lhs.ty(), rhs.ty()); + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result(result_ty, |result| make_instr(result, lhs, rhs), fuel_costs) + } + + /// Pushes an instruction parameter `param` to the list of instructions. + fn push_param(&mut self, param: Instruction) -> Result<(), Error> { + self.instrs.push_param(param); + Ok(()) + } + + /// Populate the `buffer` with the `table` targets including the `table` default target. + /// + /// Returns a shared slice to the `buffer` after it has been filled. + /// + /// # Note + /// + /// The `table` default target is pushed last to the `buffer`. + fn copy_targets_from_br_table( + table: &wasmparser::BrTable, + buffer: &mut Vec, + ) -> Result<(), Error> { + let default_target = table.default(); + buffer.clear(); + for target in table.targets() { + buffer.push(TypedVal::from(target?)); + } + buffer.push(TypedVal::from(default_target)); + Ok(()) + } + + /// Encodes a Wasm `br_table` that does not copy branching values. + /// + /// # Note + /// + /// Upon call the `immediates` buffer contains all `br_table` target values. 
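+    ///
+    /// # Note
+    ///
+    /// The encoding pushes one `Instruction::branch_table_0` followed by one
+    /// `Instruction::branch` parameter per target, including the default target
+    /// which is encoded last. For example (illustrative): a `br_table` with 3
+    /// explicit targets encodes `branch_table_0(index, 4)` followed by 4 branch
+    /// parameters.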
+ fn encode_br_table_0(&mut self, table: wasmparser::BrTable, index: Reg) -> Result<(), Error> { + debug_assert_eq!(self.immediates.len(), (table.len() + 1) as usize); + self.push_instr( + Instruction::branch_table_0(index, table.len() + 1), + FuelCostsProvider::base, + )?; + // Encode the `br_table` targets: + let targets = &self.immediates[..]; + for target in targets { + let Ok(depth) = usize::try_from(u32::from(*target)) else { + panic!("out of bounds `br_table` target does not fit `usize`: {target:?}"); + }; + let mut frame = self.stack.peek_control_mut(depth).control_frame(); + let offset = self + .labels + .try_resolve_label(frame.label(), self.instrs.next_instr())?; + self.instrs.push_param(Instruction::branch(offset)); + frame.branch_to(); + } + Ok(()) + } + + /// Encodes a Wasm `br_table` that has to copy `len_values` branching values. + /// + /// # Note + /// + /// Upon call the `immediates` buffer contains all `br_table` target values. + fn encode_br_table_n( + &mut self, + table: wasmparser::BrTable, + index: Reg, + len_values: u16, + ) -> Result<(), Error> { + debug_assert_eq!(self.immediates.len(), (table.len() + 1) as usize); + let consume_fuel_instr = self.stack.consume_fuel_instr(); + let values = self.try_form_regspan_or_move(usize::from(len_values), consume_fuel_instr)?; + self.push_instr( + Instruction::branch_table_span(index, table.len() + 1), + FuelCostsProvider::base, + )?; + self.push_param(Instruction::register_span(BoundedRegSpan::new( + values, len_values, + )))?; + // Encode the `br_table` targets: + let targets = &self.immediates[..]; + for target in targets { + let Ok(depth) = usize::try_from(u32::from(*target)) else { + panic!("out of bounds `br_table` target does not fit `usize`: {target:?}"); + }; + let mut frame = self.stack.peek_control_mut(depth).control_frame(); + let Some(results) = Self::frame_results_impl(&frame, &self.engine, &self.layout)? + else { + panic!("must have frame results since `br_table` requires to copy values"); + }; + let offset = self + .labels + .try_resolve_label(frame.label(), self.instrs.next_instr())?; + self.instrs + .push_param(Instruction::branch_table_target(results, offset)); + frame.branch_to(); + } + Ok(()) + } + + /// Encodes a generic return instruction. 
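+    ///
+    /// # Note
+    ///
+    /// The chosen encoding depends on the number of returned values (illustrative
+    /// summary of the match below):
+    ///
+    /// - 0 values: `Instruction::Return`
+    /// - 1 value: `Instruction::return_reg` or an immediate variant
+    /// - 2 or 3 values: `Instruction::return_reg2_ext` / `Instruction::return_reg3_ext`
+    /// - otherwise: `Instruction::return_span` if the values form a contiguous
+    ///   [`RegSpan`], or [`Instruction::ReturnMany`] as a fallback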
+ fn encode_return(&mut self, consume_fuel: Option) -> Result { + let len_results = self.func_type_with(FuncType::len_results); + let instr = match len_results { + 0 => Instruction::Return, + 1 => match self.stack.peek(0) { + Operand::Local(operand) => { + let value = self.layout.local_to_reg(operand.local_index())?; + Instruction::return_reg(value) + } + Operand::Temp(operand) => { + let value = self.layout.temp_to_reg(operand.operand_index())?; + Instruction::return_reg(value) + } + Operand::Immediate(operand) => { + let val = operand.val(); + match operand.ty() { + ValType::I32 => Instruction::return_imm32(i32::from(val)), + ValType::I64 => match >::try_from(i64::from(val)) { + Ok(value) => Instruction::return_i64imm32(value), + Err(_) => { + let value = self.layout.const_to_reg(val)?; + Instruction::return_reg(value) + } + }, + ValType::F32 => Instruction::return_imm32(f32::from(val)), + ValType::F64 => match >::try_from(f64::from(val)) { + Ok(value) => Instruction::return_f64imm32(value), + Err(_) => { + let value = self.layout.const_to_reg(val)?; + Instruction::return_reg(value) + } + }, + ValType::V128 | ValType::FuncRef | ValType::ExternRef => { + let value = self.layout.const_to_reg(val)?; + Instruction::return_reg(value) + } + } + } + }, + 2 => { + let (v0, v1) = self.stack.peek2(); + let v0 = self.layout.operand_to_reg(v0)?; + let v1 = self.layout.operand_to_reg(v1)?; + Instruction::return_reg2_ext(v0, v1) + } + 3 => { + let (v0, v1, v2) = self.stack.peek3(); + let v0 = self.layout.operand_to_reg(v0)?; + let v1 = self.layout.operand_to_reg(v1)?; + let v2 = self.layout.operand_to_reg(v2)?; + Instruction::return_reg3_ext(v0, v1, v2) + } + _ => { + let len_values = usize::from(len_results); + match self.try_form_regspan(len_values)? { + Some(span) => { + let values = BoundedRegSpan::new(span, len_results); + Instruction::return_span(values) + } + None => return self.encode_return_many(len_values, consume_fuel), + } + } + }; + let instr = self + .instrs + .push_instr(instr, consume_fuel, FuelCostsProvider::base)?; + Ok(instr) + } + + /// Encodes an [`Instruction::ReturnMany`] for `len` values. + /// + /// # Panics + /// + /// If `len` is not greater than or equal to 4. + fn encode_return_many( + &mut self, + len: usize, + consume_fuel_instr: Option, + ) -> Result { + self.stack.peek_n(len, &mut self.operands); + let [v0, v1, v2, rest @ ..] = &self.operands[..] else { + unreachable!("encode_return_many (pre-condition): len >= 4") + }; + let v0 = self.layout.operand_to_reg(*v0)?; + let v1 = self.layout.operand_to_reg(*v1)?; + let v2 = self.layout.operand_to_reg(*v2)?; + let return_instr = self.instrs.push_instr( + Instruction::return_many_ext(v0, v1, v2), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + self.instrs.encode_register_list(rest, &mut self.layout)?; + Ok(return_instr) + } + + /// Tries to form a [`RegSpan`] from the top-most `len` operands on the [`Stack`]. + /// + /// Returns `None` if forming a [`RegSpan`] was not possible. 
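+    ///
+    /// # Note
+    ///
+    /// A [`RegSpan`] can only be formed if the top-most `len` operands resolve to
+    /// contiguous, ascending [`Reg`] indices and none of them is an immediate.
+    /// For example (illustrative): operands in registers `3, 4, 5` form a span
+    /// starting at register `3`, whereas registers `3, 5, 6` or any immediate
+    /// operand make this method return `None`.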
+ fn try_form_regspan(&self, len: usize) -> Result, Error> { + if len == 0 { + return Ok(None); + } + let mut start = match self.stack.peek(0) { + Operand::Immediate(_) => return Ok(None), + Operand::Local(operand) => self.layout.local_to_reg(operand.local_index())?, + Operand::Temp(operand) => self.layout.temp_to_reg(operand.operand_index())?, + }; + for depth in 1..len { + let cur = match self.stack.peek(depth) { + Operand::Immediate(_) => return Ok(None), + Operand::Local(operand) => self.layout.local_to_reg(operand.local_index())?, + Operand::Temp(operand) => self.layout.temp_to_reg(operand.operand_index())?, + }; + if start != cur.next() { + return Ok(None); + } + start = cur; + } + Ok(Some(RegSpan::new(start))) + } + + /// Tries to form a [`RegSpan`] from the top-most `len` operands on the [`Stack`] or copy to temporaries. + /// + /// Returns `None` if forming a [`RegSpan`] was not possible. + fn try_form_regspan_or_move( + &mut self, + len: usize, + consume_fuel_instr: Option, + ) -> Result { + if let Some(span) = self.try_form_regspan(len)? { + return Ok(span); + } + self.move_operands_to_temp(len, consume_fuel_instr)?; + let Some(span) = self.try_form_regspan(len)? else { + unreachable!("the top-most `len` operands are now temporaries thus `RegSpan` forming should succeed") + }; + Ok(span) + } + + /// Translates the end of a Wasm `block` control frame. + fn translate_end_block(&mut self, frame: BlockControlFrame) -> Result<(), Error> { + let consume_fuel_instr = frame.consume_fuel_instr(); + if frame.is_branched_to() { + if self.reachable { + self.copy_branch_params(&frame, consume_fuel_instr)?; + } + self.push_frame_results(&frame)?; + } + if let Err(err) = self + .labels + .pin_label(frame.label(), self.instrs.next_instr()) + { + panic!("failed to pin label: {err}") + } + self.reachable |= frame.is_branched_to(); + if self.reachable && self.stack.is_control_empty() { + self.encode_return(consume_fuel_instr)?; + } + Ok(()) + } + + /// Translates the end of a Wasm `loop` control frame. + fn translate_end_loop(&mut self, _frame: LoopControlFrame) -> Result<(), Error> { + debug_assert!(!self.stack.is_control_empty()); + // Nothing needs to be done since Wasm `loop` control frames always only have a single exit. + Ok(()) + } + + /// Translates the end of a Wasm `if` control frame. + fn translate_end_if(&mut self, frame: IfControlFrame) -> Result<(), Error> { + debug_assert!(!self.stack.is_control_empty()); + let is_end_of_then_reachable = self.reachable; + let IfReachability::Both { else_label } = frame.reachability() else { + let is_end_reachable = match frame.reachability() { + IfReachability::OnlyThen => self.reachable, + IfReachability::OnlyElse => false, + IfReachability::Both { .. 
} => unreachable!(), + }; + return self.translate_end_if_or_else_only(frame, is_end_reachable); + }; + let len_results = frame.ty().len_results(self.engine()); + let has_results = len_results >= 1; + if is_end_of_then_reachable && has_results { + let consume_fuel_instr = frame.consume_fuel_instr(); + self.copy_branch_params(&frame, consume_fuel_instr)?; + let end_offset = self + .labels + .try_resolve_label(frame.label(), self.instrs.next_instr()) + .unwrap(); + self.instrs.push_instr( + Instruction::branch(end_offset), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + } + self.labels + .try_pin_label(else_label, self.instrs.next_instr()); + self.stack.push_else_operands(&frame)?; + if has_results { + // We haven't visited the `else` block and thus the `else` + // providers are still on the auxiliary stack and need to + // be popped. We use them to restore the stack to the state + // when entering the `if` block so that we can properly copy + // the `else` results to were they are expected. + let consume_fuel_instr = self.instrs.push_consume_fuel_instr()?; + self.copy_branch_params(&frame, consume_fuel_instr)?; + } + self.push_frame_results(&frame)?; + self.labels + .pin_label(frame.label(), self.instrs.next_instr()) + .unwrap(); + self.reachable = true; + Ok(()) + } + + /// Translates the end of a Wasm `else` control frame. + fn translate_end_else(&mut self, frame: ElseControlFrame) -> Result<(), Error> { + debug_assert!(!self.stack.is_control_empty()); + match frame.reachability() { + ElseReachability::OnlyThen { + is_end_of_then_reachable, + } => { + return self.translate_end_if_or_else_only(frame, is_end_of_then_reachable); + } + ElseReachability::OnlyElse => { + return self.translate_end_if_or_else_only(frame, self.reachable); + } + _ => {} + }; + let end_of_then_reachable = frame.is_end_of_then_reachable(); + let end_of_else_reachable = self.reachable; + let reachable = match (end_of_then_reachable, end_of_else_reachable) { + (false, false) => frame.is_branched_to(), + _ => true, + }; + if end_of_else_reachable { + let consume_fuel_instr: Option = frame.consume_fuel_instr(); + self.copy_branch_params(&frame, consume_fuel_instr)?; + } + self.push_frame_results(&frame)?; + self.labels + .pin_label(frame.label(), self.instrs.next_instr()) + .unwrap(); + self.reachable = reachable; + Ok(()) + } + + /// Translates the end of a Wasm `else` control frame where only one branch is known to be reachable. + fn translate_end_if_or_else_only( + &mut self, + frame: impl ControlFrameBase, + end_is_reachable: bool, + ) -> Result<(), Error> { + if frame.is_branched_to() { + if end_is_reachable { + let consume_fuel_instr = frame.consume_fuel_instr(); + self.copy_branch_params(&frame, consume_fuel_instr)?; + } + self.push_frame_results(&frame)?; + } + self.labels + .pin_label(frame.label(), self.instrs.next_instr()) + .unwrap(); + self.reachable = end_is_reachable || frame.is_branched_to(); + Ok(()) + } + + /// Translates the end of an unreachable Wasm control frame. + fn translate_end_unreachable(&mut self, _frame: ControlFrameKind) -> Result<(), Error> { + debug_assert!(!self.stack.is_control_empty()); + Ok(()) + } + + /// Translate the Wasm `local.set` and `local.tee` operations. + /// + /// # Note + /// + /// This applies op-code fusion that replaces the result of the previous instruction + /// instead of encoding a copy instruction for the `local.set` or `local.tee` if possible. 
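+    ///
+    /// # Note
+    ///
+    /// For example (illustrative): for `(local.set $x (i32.add (local.get $a) (local.get $b)))`
+    /// the translator tries to rewrite the result register of the previously encoded
+    /// `i32.add` to the register of `$x` instead of emitting an extra copy. If the
+    /// previous instruction cannot be re-linked, a copy instruction is encoded instead.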
+ fn translate_local_set(&mut self, local_index: u32, push_result: bool) -> Result<(), Error> { + bail_unreachable!(self); + let input = self.stack.pop(); + if let Operand::Local(input) = input { + if u32::from(input.local_index()) == local_index { + // Case: `(local.set $n (local.get $n))` is a no-op so we can ignore it. + // + // Note: This does not require any preservation since it won't change + // the value of `local $n`. + return Ok(()); + } + } + let local_idx = LocalIdx::from(local_index); + let consume_fuel_instr = self.stack.consume_fuel_instr(); + for preserved in self.stack.preserve_locals(local_idx) { + let result = self.layout.temp_to_reg(preserved)?; + let value = self.layout.local_to_reg(local_idx)?; + self.instrs.push_instr( + Instruction::copy(result, value), + consume_fuel_instr, + FuelCostsProvider::base, + )?; + } + if push_result { + match input { + Operand::Immediate(input) => { + self.stack.push_immediate(input.val())?; + } + _ => { + self.stack.push_local(local_idx)?; + } + } + } + if self.try_replace_result(local_idx, input)? { + // Case: it was possible to replace the result of the previous + // instructions so no copy instruction is required. + return Ok(()); + } + // At this point we need to encode a copy instruction. + let result = self.layout.local_to_reg(local_idx)?; + let instr = match input { + Operand::Immediate(operand) => self.make_copy_imm_instr(result, operand.val())?, + operand => { + let input = self.layout.operand_to_reg(operand)?; + Instruction::copy(result, input) + } + }; + self.instrs + .push_instr(instr, consume_fuel_instr, FuelCostsProvider::base)?; + Ok(()) + } + + /// Tries to replace the result of the previous instruction with `new_result` if possible. + /// + /// Returns `Ok(true)` if replacement was successful and `Ok(false)` otherwise. + fn try_replace_result( + &mut self, + new_result: LocalIdx, + old_result: Operand, + ) -> Result { + let result = self.layout.local_to_reg(new_result)?; + let old_result = match old_result { + Operand::Immediate(_) => { + // Case: cannot replace immediate value result. + return Ok(false); + } + Operand::Local(_) => { + // Case: cannot replace local with another local due to observable behavior. + return Ok(false); + } + Operand::Temp(operand) => self.layout.temp_to_reg(operand.operand_index())?, + }; + self.instrs + .try_replace_result(result, old_result, &self.layout, &self.module) + } + + /// Encodes an unconditional Wasm `branch` instruction. + fn encode_br(&mut self, label: LabelRef) -> Result { + let instr = self.instrs.next_instr(); + let offset = self.labels.try_resolve_label(label, instr)?; + let br_instr = self.push_instr(Instruction::branch(offset), FuelCostsProvider::base)?; + Ok(br_instr) + } + + /// Encodes a `i32.eqz`+`br_if` or `if` conditional branch instruction. + fn encode_br_eqz(&mut self, condition: Operand, label: LabelRef) -> Result<(), Error> { + self.encode_br_if(condition, label, true) + } + + /// Encodes a `br_if` conditional branch instruction. + fn encode_br_nez(&mut self, condition: Operand, label: LabelRef) -> Result<(), Error> { + self.encode_br_if(condition, label, false) + } + + /// Encodes a generic `br_if` fused conditional branch instruction. + fn encode_br_if( + &mut self, + condition: Operand, + label: LabelRef, + branch_eqz: bool, + ) -> Result<(), Error> { + if self.try_fuse_branch_cmp(condition, label, branch_eqz)? 
{ + return Ok(()); + } + let condition = match condition { + Operand::Local(condition) => self.layout.local_to_reg(condition.local_index())?, + Operand::Temp(condition) => self.layout.temp_to_reg(condition.operand_index())?, + Operand::Immediate(condition) => { + let condition = i32::from(condition.val()); + let take_branch = match branch_eqz { + true => condition == 0, + false => condition != 0, + }; + match take_branch { + true => { + self.encode_br(label)?; + self.reachable = false; + return Ok(()); + } + false => return Ok(()), + } + } + }; + let instr = self.instrs.next_instr(); + let offset = self.labels.try_resolve_label(label, instr)?; + let instr = match BranchOffset16::try_from(offset) { + Ok(offset) => match branch_eqz { + true => Instruction::branch_i32_eq_imm16(condition, 0, offset), + false => Instruction::branch_i32_ne_imm16(condition, 0, offset), + }, + Err(_) => { + let zero = self.layout.const_to_reg(0_i32)?; + let comparator = match branch_eqz { + true => Comparator::I32Eq, + false => Comparator::I32Ne, + }; + self.make_branch_cmp_fallback(comparator, condition, zero, offset)? + } + }; + self.push_instr(instr, FuelCostsProvider::base)?; + Ok(()) + } + + /// Create an [`Instruction::BranchCmpFallback`]. + fn make_branch_cmp_fallback( + &mut self, + cmp: Comparator, + lhs: Reg, + rhs: Reg, + offset: BranchOffset, + ) -> Result { + let params = self + .layout + .const_to_reg(ComparatorAndOffset::new(cmp, offset))?; + Ok(Instruction::branch_cmp_fallback(lhs, rhs, params)) + } + + /// Try to fuse a cmp+branch [`Instruction`] with optional negation. + fn try_fuse_branch_cmp( + &mut self, + condition: Operand, + label: LabelRef, + negate: bool, + ) -> Result { + let Some(last_instr) = self.instrs.last_instr() else { + // Case: cannot fuse without a known last instruction + return Ok(false); + }; + let Operand::Temp(condition) = condition else { + // Case: cannot fuse non-temporary operands + // - locals have observable behavior. + // - immediates cannot be the result of a previous instruction. + return Ok(false); + }; + let Some(origin) = condition.instr() else { + // Case: cannot fuse temporary operands without origin instruction + return Ok(false); + }; + if last_instr != origin { + // Case: cannot fuse if last instruction does not match origin instruction + return Ok(false); + } + debug_assert!(matches!(condition.ty(), ValType::I32 | ValType::I64)); + let fused_instr = self.try_make_fused_branch_cmp_instr(origin, condition, label, negate)?; + let Some(fused_instr) = fused_instr else { + // Case: not possible to perform fusion with last instruction + return Ok(false); + }; + assert!( + self.instrs.try_replace_instr(origin, fused_instr)?, + "op-code fusion must suceed at this point", + ); + Ok(true) + } + + /// Try to return a fused cmp+branch [`Instruction`] from the given parameters. + /// + /// + /// # Note + /// + /// - The `instr` parameter refers to the to-be-fused cmp instruction. + /// - Returns `Ok(Some)` if cmp+branch fusion was successful. + /// - Returns `Ok(None)`, otherwise. + fn try_make_fused_branch_cmp_instr( + &mut self, + instr: Instr, + condition: TempOperand, + label: LabelRef, + negate: bool, + ) -> Result, Error> { + let cmp_instr = *self.instrs.get(instr); + let Some(result) = cmp_instr.compare_result() else { + // Note: cannot fuse non-cmp instructions or cmp-instructions without result. + return Ok(None); + }; + if matches!(self.layout.stack_space(result), StackSpace::Local) { + // Note: cannot fuse cmp instructions with observable semantics. 
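+ // Note: the result lives in a local register; dropping the compare in favor of a fused branch would elide that observable write.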
+ return Ok(None); + } + if result != self.layout.temp_to_reg(condition.operand_index())? { + // Note: cannot fuse cmp instruction with a result that differs + // from the condition operand. + return Ok(None); + } + let cmp_instr = match negate { + false => cmp_instr, + true => match cmp_instr.negate_cmp_instr() { + Some(negated) => negated, + None => { + // Note: cannot negate cmp instruction, thus not possible to fuse. + return Ok(None); + } + }, + }; + let offset = self.labels.try_resolve_label(label, instr)?; + let fused = cmp_instr + .try_into_cmp_branch_instr(offset, &mut self.layout)? + .expect("cmp+branch fusion must succeed"); + Ok(Some(fused)) + } + + /// Translates a unary Wasm instruction to Wasmi bytecode. + fn translate_unary( + &mut self, + make_instr: fn(result: Reg, input: Reg) -> Instruction, + consteval: fn(input: T) -> R, + ) -> Result<(), Error> + where + T: From, + R: Into + Typed, + { + bail_unreachable!(self); + let input = self.stack.pop(); + if let Operand::Immediate(input) = input { + self.stack.push_immediate(consteval(input.val().into()))?; + return Ok(()); + } + let input = self.layout.operand_to_reg(input)?; + self.push_instr_with_result( + ::TY, + |result| make_instr(result, input), + FuelCostsProvider::base, + ) + } + + /// Translates a unary Wasm instruction to Wasmi bytecode. + fn translate_unary_fallible( + &mut self, + make_instr: fn(result: Reg, input: Reg) -> Instruction, + consteval: fn(input: T) -> Result, + ) -> Result<(), Error> + where + T: From, + R: Into + Typed, + { + bail_unreachable!(self); + let input = self.stack.pop(); + if let Operand::Immediate(input) = input { + let input = T::from(input.val()); + match consteval(input) { + Ok(result) => { + self.stack.push_immediate(result)?; + } + Err(trap) => { + self.translate_trap(trap)?; + } + } + return Ok(()); + } + let input = self.layout.operand_to_reg(input)?; + self.push_instr_with_result( + ::TY, + |result| make_instr(result, input), + FuelCostsProvider::base, + ) + } + + /// Translate a generic Wasm reinterpret-like operation. + /// + /// # Note + /// + /// This Wasm operation is a no-op. Ideally we only have to change the types on the stack. + fn translate_reinterpret(&mut self, consteval: fn(T) -> R) -> Result<(), Error> + where + T: From + Typed, + R: Into + Typed, + { + bail_unreachable!(self); + match self.stack.pop() { + Operand::Local(input) => { + debug_assert_eq!(input.ty(), ::TY); + // TODO: improve performance by allowing type overwrites for local operands + let input = self.layout.local_to_reg(input.local_index())?; + self.push_instr_with_result( + ::TY, + |result| Instruction::copy(result, input), + FuelCostsProvider::base, + )?; + } + Operand::Temp(input) => { + debug_assert_eq!(input.ty(), ::TY); + self.stack.push_temp(::TY, None)?; + } + Operand::Immediate(input) => { + let input: T = input.val().into(); + self.stack.push_immediate(consteval(input))?; + } + } + Ok(()) + } + + /// Creates a new 16-bit encoded [`Input16`] from the given `value`. + pub fn make_imm16(&mut self, value: T) -> Result, Error> + where + T: Into + Copy + TryInto>, + { + match value.try_into() { + Ok(rhs) => Ok(Input::Immediate(rhs)), + Err(_) => { + let rhs = self.layout.const_to_reg(value)?; + Ok(Input::Reg(rhs)) + } + } + } + + /// Creates a new 16-bit encoded [`Input16`] from the given `operand`. 
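+ /// Immediate operands that do not fit into 16 bits are allocated as function-local constants instead.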
+ pub fn make_input16(&mut self, operand: Operand) -> Result, Error> + where + T: From + Into + TryInto> + Copy, + { + self.make_input(operand, |this, imm| { + let opd16 = match T::from(imm).try_into() { + Ok(rhs) => Input::Immediate(rhs), + Err(_) => { + let rhs = this.layout.const_to_reg(imm)?; + Input::Reg(rhs) + } + }; + Ok(opd16) + }) + } + + /// Create a new generic [`Input`] from the given `operand`. + fn make_input( + &mut self, + operand: Operand, + f: impl FnOnce(&mut Self, TypedVal) -> Result, Error>, + ) -> Result, Error> { + let reg = match operand { + Operand::Local(operand) => self.layout.local_to_reg(operand.local_index())?, + Operand::Temp(operand) => self.layout.temp_to_reg(operand.operand_index())?, + Operand::Immediate(operand) => return f(self, operand.val()), + }; + Ok(Input::Reg(reg)) + } + + /// Converts the `provider` to a 16-bit index-type constant value. + /// + /// # Note + /// + /// - Turns immediates that cannot be 16-bit encoded into function local constants. + /// - The behavior is different whether `memory64` is enabled or disabled. + pub(super) fn make_index16( + &mut self, + operand: Operand, + index_type: IndexType, + ) -> Result, Error> { + let value = match operand { + Operand::Immediate(value) => value.val(), + operand => { + debug_assert_eq!(operand.ty(), index_type.ty()); + let reg = self.layout.operand_to_reg(operand)?; + return Ok(Input::Reg(reg)); + } + }; + match index_type { + IndexType::I64 => { + if let Ok(value) = Const16::try_from(u64::from(value)) { + return Ok(Input::Immediate(value)); + } + } + IndexType::I32 => { + if let Ok(value) = Const16::try_from(u32::from(value)) { + return Ok(Input::Immediate(>::cast(value))); + } + } + } + let reg = self.layout.const_to_reg(value)?; + Ok(Input::Reg(reg)) + } + + /// Converts the `provider` to a 32-bit index-type constant value. + /// + /// # Note + /// + /// - Turns immediates that cannot be 32-bit encoded into function local constants. + /// - The behavior is different whether `memory64` is enabled or disabled. + pub(super) fn make_index32( + &mut self, + operand: Operand, + index_type: IndexType, + ) -> Result, Error> { + let value = match operand { + Operand::Immediate(value) => value.val(), + operand => { + debug_assert_eq!(operand.ty(), index_type.ty()); + let reg = self.layout.operand_to_reg(operand)?; + return Ok(Input::Reg(reg)); + } + }; + match index_type { + IndexType::I64 => { + if let Ok(value) = Const32::try_from(u64::from(value)) { + return Ok(Input::Immediate(value)); + } + } + IndexType::I32 => { + let value32 = Const32::from(u32::from(value)); + return Ok(Input::Immediate(>::cast(value32))); + } + } + let reg = self.layout.const_to_reg(value)?; + Ok(Input::Reg(reg)) + } + + /// Evaluates `consteval(lhs, rhs)` and pushed either its result or tranlates a `trap`. + fn translate_binary_consteval_fallible( + &mut self, + lhs: ImmediateOperand, + rhs: ImmediateOperand, + consteval: impl FnOnce(T, T) -> Result, + ) -> Result<(), Error> + where + T: From, + R: Into, + { + let lhs: T = lhs.val().into(); + let rhs: T = rhs.val().into(); + match consteval(lhs, rhs) { + Ok(value) => { + self.stack.push_immediate(value)?; + } + Err(trap) => { + self.translate_trap(trap)?; + } + } + Ok(()) + } + + /// Evaluates `consteval(lhs, rhs)` and pushed either its result or tranlates a `trap`. 
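+ /// Both operands are required to already be immediate (constant) operands.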
+ fn translate_binary_consteval( + &mut self, + lhs: ImmediateOperand, + rhs: ImmediateOperand, + consteval: fn(T, T) -> R, + ) -> Result<(), Error> + where + T: From, + R: Into, + { + self.translate_binary_consteval_fallible::(lhs, rhs, |lhs, rhs| { + Ok(consteval(lhs, rhs)) + }) + } + + /// Convenience method to tell that there is no custom optimization. + fn no_opt_ri(&mut self, _lhs: Operand, _rhs: T) -> Result { + Ok(false) + } + + /// Translates a commutative binary Wasm operator to Wasmi bytecode. + fn translate_binary_commutative( + &mut self, + make_rr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_ri: fn(result: Reg, lhs: Reg, rhs: Const16) -> Instruction, + consteval: fn(T, T) -> R, + opt_ri: fn(this: &mut Self, lhs: Operand, rhs: T) -> Result, + ) -> Result<(), Error> + where + T: WasmInteger + TryInto>, + R: Into + Typed, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval::(lhs, rhs, consteval) + } + (val, Operand::Immediate(imm)) | (Operand::Immediate(imm), val) => { + let rhs = imm.val().into(); + if opt_ri(self, val, rhs)? { + return Ok(()); + } + let lhs = self.layout.operand_to_reg(val)?; + let rhs16 = self.make_imm16(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match rhs16 { + Input::Immediate(rhs) => make_ri(result, lhs, rhs), + Input::Reg(rhs) => make_rr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (lhs, rhs) => self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_rr, + FuelCostsProvider::base, + ), + } + } + + /// Translates integer division and remainder Wasm operators to Wasmi bytecode. + fn translate_divrem( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_instr_imm16_rhs: fn( + result: Reg, + lhs: Reg, + rhs: Const16<::NonZero>, + ) -> Instruction, + make_instr_imm16_lhs: fn(result: Reg, lhs: Const16, rhs: Reg) -> Instruction, + consteval: fn(T, T) -> Result, + ) -> Result<(), Error> + where + T: WasmInteger, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval_fallible::(lhs, rhs, consteval) + } + (lhs, Operand::Immediate(rhs)) => { + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = T::from(rhs.val()); + let Some(non_zero_rhs) = ::non_zero(rhs) else { + // Optimization: division by zero always traps + return self.translate_trap(TrapCode::IntegerDivisionByZero); + }; + let rhs16 = self.make_imm16(non_zero_rhs)?; + self.push_instr_with_result( + ::TY, + |result| match rhs16 { + Input::Immediate(rhs) => make_instr_imm16_rhs(result, lhs, rhs), + Input::Reg(rhs) => make_instr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (Operand::Immediate(lhs), rhs) => { + let lhs = T::from(lhs.val()); + let lhs16 = self.make_imm16(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match lhs16 { + Input::Immediate(lhs) => make_instr_imm16_lhs(result, lhs, rhs), + Input::Reg(lhs) => make_instr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (lhs, rhs) => self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_instr, + FuelCostsProvider::base, + ), + } + } + + /// Translates binary non-commutative Wasm operators to Wasmi bytecode. 
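+ /// Since operand order matters, separate 16-bit immediate encodings are used for an immediate `rhs` and an immediate `lhs`.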
+ fn translate_binary( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_instr_imm16_rhs: fn(result: Reg, lhs: Reg, rhs: Const16) -> Instruction, + make_instr_imm16_lhs: fn(result: Reg, lhs: Const16, rhs: Reg) -> Instruction, + consteval: fn(T, T) -> R, + ) -> Result<(), Error> + where + T: WasmInteger, + R: Into + Typed, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval::(lhs, rhs, consteval) + } + (lhs, Operand::Immediate(rhs)) => { + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = T::from(rhs.val()); + let rhs16 = self.make_imm16(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match rhs16 { + Input::Immediate(rhs) => make_instr_imm16_rhs(result, lhs, rhs), + Input::Reg(rhs) => make_instr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (Operand::Immediate(lhs), rhs) => { + let lhs = T::from(lhs.val()); + let lhs16 = self.make_imm16(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match lhs16 { + Input::Immediate(lhs) => make_instr_imm16_lhs(result, lhs, rhs), + Input::Reg(lhs) => make_instr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (lhs, rhs) => self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_instr, + FuelCostsProvider::base, + ), + } + } + + /// Translates Wasm `i{32,64}.sub` operators to Wasmi bytecode. + fn translate_isub( + &mut self, + make_sub_rr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_add_ri: fn(result: Reg, lhs: Reg, rhs: Const16) -> Instruction, + make_sub_ir: fn(result: Reg, lhs: Const16, rhs: Reg) -> Instruction, + consteval: fn(T, T) -> R, + ) -> Result<(), Error> + where + T: WasmInteger, + R: Into + Typed, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval::(lhs, rhs, consteval) + } + (lhs, Operand::Immediate(rhs)) => { + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = T::from(rhs.val()); + let rhs16 = match rhs.wrapping_neg().try_into() { + Ok(rhs) => Input::Immediate(rhs), + Err(_) => { + let rhs = self.layout.const_to_reg(rhs)?; + Input::Reg(rhs) + } + }; + self.push_instr_with_result( + ::TY, + |result| match rhs16 { + Input::Immediate(rhs) => make_add_ri(result, lhs, rhs), + Input::Reg(rhs) => make_sub_rr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (Operand::Immediate(lhs), rhs) => { + let lhs = T::from(lhs.val()); + let lhs16 = self.make_imm16(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match lhs16 { + Input::Immediate(lhs) => make_sub_ir(result, lhs, rhs), + Input::Reg(lhs) => make_sub_rr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (lhs, rhs) => self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_sub_rr, + FuelCostsProvider::base, + ), + } + } + + /// Translates Wasm shift and rotate operators to Wasmi bytecode. 
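+ /// Shifting by a constant zero amount, or shifting a constant zero value, is optimized to a no-op.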
+ fn translate_shift( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_instr_imm16_rhs: fn( + result: Reg, + lhs: Reg, + rhs: ::Output, + ) -> Instruction, + make_instr_imm16_lhs: fn(result: Reg, lhs: Const16, rhs: Reg) -> Instruction, + consteval: fn(T, T) -> T, + ) -> Result<(), Error> + where + T: WasmInteger + IntoShiftAmount>, + Const16: From, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval::(lhs, rhs, consteval) + } + (lhs, Operand::Immediate(rhs)) => { + let Some(rhs) = T::into_shift_amount(rhs.val().into()) else { + // Optimization: Shifting or rotating by zero bits is a no-op. + self.stack.push_operand(lhs)?; + return Ok(()); + }; + let lhs = self.layout.operand_to_reg(lhs)?; + self.push_instr_with_result( + ::TY, + |result| make_instr_imm16_rhs(result, lhs, rhs), + FuelCostsProvider::base, + ) + } + (Operand::Immediate(lhs), rhs) => { + let lhs = T::from(lhs.val()); + if lhs.is_zero() { + // Optimization: Shifting or rotating a zero value is a no-op. + self.stack.push_immediate(lhs)?; + return Ok(()); + } + let lhs16 = self.make_imm16(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ::TY, + |result| match lhs16 { + Input::Immediate(lhs) => make_instr_imm16_lhs(result, lhs, rhs), + Input::Reg(lhs) => make_instr(result, lhs, rhs), + }, + FuelCostsProvider::base, + ) + } + (lhs, rhs) => self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_instr, + FuelCostsProvider::base, + ), + } + } + + /// Translate a binary float Wasm operation. + fn translate_fbinary( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + consteval: fn(T, T) -> R, + ) -> Result<(), Error> + where + T: WasmFloat, + R: Into + Typed, + { + bail_unreachable!(self); + let (lhs, rhs) = self.stack.pop2(); + if let (Operand::Immediate(lhs), Operand::Immediate(rhs)) = (lhs, rhs) { + return self.translate_binary_consteval::(lhs, rhs, consteval); + } + self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_instr, + FuelCostsProvider::base, + ) + } + + /// Translate Wasmi `{f32,f64}.copysign` instructions. + /// + /// # Note + /// + /// - This applies some optimization that are valid for copysign instructions. + /// - Applies constant evaluation if both operands are constant values. + fn translate_fcopysign( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_instr_imm: fn(result: Reg, lhs: Reg, rhs: Sign) -> Instruction, + consteval: fn(T, T) -> T, + ) -> Result<(), Error> + where + T: WasmFloat, + { + bail_unreachable!(self); + match self.stack.pop2() { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + self.translate_binary_consteval::(lhs, rhs, consteval) + } + (lhs, Operand::Immediate(rhs)) => { + let lhs = self.layout.operand_to_reg(lhs)?; + let sign = T::from(rhs.val()).sign(); + self.push_instr_with_result( + ::TY, + |result| make_instr_imm(result, lhs, sign), + FuelCostsProvider::base, + ) + } + (lhs, rhs) => { + if lhs.is_same(&rhs) { + // Optimization: `copysign x x` is always just `x` + self.stack.push_operand(lhs)?; + return Ok(()); + } + self.push_binary_instr_with_result( + ::TY, + lhs, + rhs, + make_instr, + FuelCostsProvider::base, + ) + } + } + } + + /// Translates a generic trap instruction. 
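+ /// All code following the trap is marked as unreachable.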
+ fn translate_trap(&mut self, trap: TrapCode) -> Result<(), Error> { + self.push_instr(Instruction::trap(trap), FuelCostsProvider::base)?; + self.reachable = false; + Ok(()) + } + + /// Translates a Wasm `select` or `select ` instruction. + /// + /// # Note + /// + /// - This applies constant propagation in case `condition` is a constant value. + /// - If both `lhs` and `rhs` are equal registers or constant values `lhs` is forwarded. + /// - Fuses compare instructions with the associated select instructions if possible. + fn translate_select(&mut self, type_hint: Option) -> Result<(), Error> { + bail_unreachable!(self); + let (true_val, false_val, condition) = self.stack.pop3(); + if let Some(type_hint) = type_hint { + debug_assert_eq!(true_val.ty(), type_hint); + debug_assert_eq!(false_val.ty(), type_hint); + } + let ty = true_val.ty(); + if true_val.is_same(&false_val) { + // Optimization: both `lhs` and `rhs` either are the same register or constant values and + // thus `select` will always yield this same value irrespective of the condition. + self.stack.push_operand(true_val)?; + return Ok(()); + } + if let Operand::Immediate(condition) = condition { + // Optimization: since condition is a constant value we can const-fold the `select` + // instruction and simply push the selected value back to the provider stack. + let condition = i32::from(condition.val()) != 0; + let selected = match condition { + true => true_val, + false => false_val, + }; + if let Operand::Temp(selected) = selected { + // Case: the selected operand is a temporary which needs to be copied + // if it was the `false_val` since it changed its index. This is + // not the case for the `true_val` since `true_val` is the first + // value popped from the stack. + if !condition { + let selected = self.layout.temp_to_reg(selected.operand_index())?; + self.push_instr_with_result( + ty, + |result| Instruction::copy(result, selected), + FuelCostsProvider::base, + )?; + } + } + self.stack.push_operand(selected)?; + return Ok(()); + } + let condition = self.layout.operand_to_reg(condition)?; + let mut true_val = self.layout.operand_to_reg(true_val)?; + let mut false_val = self.layout.operand_to_reg(false_val)?; + match self + .instrs + .try_fuse_select(ty, condition, &self.layout, &mut self.stack)? + { + Some(swap_operands) => { + if swap_operands { + mem::swap(&mut true_val, &mut false_val); + } + } + None => { + self.push_instr_with_result( + ty, + |result| Instruction::select_i32_eq_imm16(result, condition, 0_i16), + FuelCostsProvider::base, + )?; + mem::swap(&mut true_val, &mut false_val); + } + }; + self.push_param(Instruction::register2_ext(true_val, false_val))?; + Ok(()) + } + + /// Create either [`Instruction::CallIndirectParams`] or [`Instruction::CallIndirectParamsImm16`] depending on the inputs. + fn call_indirect_params( + &mut self, + index: Operand, + table_index: u32, + ) -> Result { + let table_type = *self.module.get_type_of_table(TableIdx::from(table_index)); + let index = self.make_index16(index, table_type.index_ty())?; + let instr = match index { + Input::Reg(index) => Instruction::call_indirect_params(index, table_index), + Input::Immediate(index) => Instruction::call_indirect_params_imm16(index, table_index), + }; + Ok(instr) + } + + /// Tries to fuse a Wasm `i32.eqz` (or `i32.eq` with 0 `rhs` value) instruction. + /// + /// Returns + /// + /// - `Ok(true)` if the intruction fusion was successful. + /// - `Ok(false)` if instruction fusion could not be applied. + /// - `Err(_)` if an error occurred. 
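+ /// On success the preceding compare instruction is rewritten in place instead of encoding a separate comparison against zero.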
+ pub fn fuse_eqz(&mut self, lhs: Operand, rhs: T) -> Result { + self.fuse_commutative_cmp_with(lhs, rhs, NegateCmpInstr::negate_cmp_instr) + } + + /// Tries to fuse a Wasm `i32.ne` instruction with 0 `rhs` value. + /// + /// Returns + /// + /// - `Ok(true)` if the intruction fusion was successful. + /// - `Ok(false)` if instruction fusion could not be applied. + /// - `Err(_)` if an error occurred. + pub fn fuse_nez(&mut self, lhs: Operand, rhs: T) -> Result { + self.fuse_commutative_cmp_with(lhs, rhs, LogicalizeCmpInstr::logicalize_cmp_instr) + } + + /// Tries to fuse a `i{32,64}`.{eq,ne}` instruction with `rhs` of zero. + /// + /// Generically applies `f` onto the fused last instruction. + /// + /// Returns + /// + /// - `Ok(true)` if the intruction fusion was successful. + /// - `Ok(false)` if instruction fusion could not be applied. + /// - `Err(_)` if an error occurred. + pub fn fuse_commutative_cmp_with( + &mut self, + lhs: Operand, + rhs: T, + f: fn(&Instruction) -> Option, + ) -> Result { + if !rhs.is_zero() { + // Case: cannot fuse with non-zero `rhs` + return Ok(false); + } + let Some(last_instr) = self.instrs.last_instr() else { + // Case: cannot fuse without registered last instruction + return Ok(false); + }; + let Operand::Temp(lhs_opd) = lhs else { + // Case: cannot fuse non-temporary operands + // - locals have observable behavior. + // - immediates cannot be the result of a previous instruction. + return Ok(false); + }; + let Some(origin) = lhs_opd.instr() else { + // Case: `lhs` has no origin instruciton, thus not possible to fuse. + return Ok(false); + }; + if origin != last_instr { + // Case: `lhs`'s origin instruction does not match the last instruction + return Ok(false); + } + let lhs_reg = self.layout.temp_to_reg(lhs_opd.operand_index())?; + let last_instruction = self.instrs.get(last_instr); + let Some(result) = last_instruction.compare_result() else { + // Case: cannot fuse non-cmp instructions + return Ok(false); + }; + if result != lhs_reg { + // Case: the `cmp` instruction does not feed into the `eqz` and cannot be fused + return Ok(false); + } + let Some(negated) = f(last_instruction) else { + // Case: the `cmp` instruction cannot be negated + return Ok(false); + }; + if !self.instrs.try_replace_instr(last_instr, negated)? { + // Case: could not replace the `cmp` instruction with the fused one + return Ok(false); + } + self.stack.push_operand(lhs)?; + Ok(true) + } + + /// Translates a Wasm `load` instruction to Wasmi bytecode. + /// + /// # Note + /// + /// This chooses the right encoding for the given `load` instruction. + /// If `ptr+offset` is a constant value the address is pre-calculated. 
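+ /// A pre-calculated address that is out of bounds is translated to an unconditional memory access trap.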
+ /// + /// # Usage + /// + /// Used for translating the following Wasm operators to Wasmi bytecode: + /// + /// - `{i32, i64, f32, f64}.load` + /// - `i32.{load8_s, load8_u, load16_s, load16_u}` + /// - `i64.{load8_s, load8_u, load16_s, load16_u load32_s, load32_u}` + fn translate_load( + &mut self, + memarg: MemArg, + loaded_ty: ValType, + make_instr: fn(result: Reg, offset_lo: Offset64Lo) -> Instruction, + make_instr_offset16: fn(result: Reg, ptr: Reg, offset: Offset16) -> Instruction, + make_instr_at: fn(result: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (memory, offset) = Self::decode_memarg(memarg); + let ptr = self.stack.pop(); + let (ptr, offset) = match ptr { + Operand::Immediate(ptr) => { + let ptr = ptr.val(); + let Some(address) = self.effective_address(memory, ptr, offset) else { + return self.translate_trap(TrapCode::MemoryOutOfBounds); + }; + if let Ok(address) = Address32::try_from(address) { + self.push_instr_with_result( + loaded_ty, + |result| make_instr_at(result, address), + FuelCostsProvider::load, + )?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + return Ok(()); + } + // Case: we cannot use specialized encoding and thus have to fall back + // to the general case where `ptr` is zero and `offset` stores the + // `ptr+offset` address value. + let zero_ptr = self.layout.const_to_reg(0_u64)?; + (zero_ptr, u64::from(address)) + } + ptr => { + let ptr = self.layout.operand_to_reg(ptr)?; + (ptr, offset) + } + }; + if memory.is_default() { + if let Ok(offset) = Offset16::try_from(offset) { + self.push_instr_with_result( + loaded_ty, + |result| make_instr_offset16(result, ptr, offset), + FuelCostsProvider::load, + )?; + return Ok(()); + } + } + let (offset_hi, offset_lo) = Offset64::split(offset); + self.push_instr_with_result( + loaded_ty, + |result| make_instr(result, offset_lo), + FuelCostsProvider::load, + )?; + self.push_param(Instruction::register_and_offset_hi(ptr, offset_hi))?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + /// Translates Wasm integer `store` and `storeN` instructions to Wasmi bytecode. + /// + /// # Note + /// + /// This chooses the most efficient encoding for the given `store` instruction. + /// If `ptr+offset` is a constant value the pointer address is pre-calculated. + /// + /// # Usage + /// + /// Used for translating the following Wasm operators to Wasmi bytecode: + /// + /// - `{i32, i64}.{store, store8, store16, store32}` + fn translate_istore_wrap( + &mut self, + memarg: MemArg, + ) -> Result<(), Error> + where + T::Value: Copy + Wrap + From, + T::Param: TryFrom + Into, + { + bail_unreachable!(self); + let (ptr, value) = self.stack.pop2(); + self.encode_istore_wrap::(memarg, ptr, value) + } + + /// Encodes Wasm integer `store` and `storeN` instructions as Wasmi bytecode. 
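+ /// Immediate store values that fit into the 16-bit parameter encoding are encoded inline; otherwise they are spilled to function-local constant registers.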
+ fn encode_istore_wrap( + &mut self, + memarg: MemArg, + ptr: Operand, + value: Operand, + ) -> Result<(), Error> + where + T::Value: Copy + Wrap + From, + T::Param: TryFrom + Into, + { + let (memory, offset) = Self::decode_memarg(memarg); + let (ptr, offset) = match ptr { + Operand::Immediate(ptr) => { + let ptr = ptr.val(); + let Some(address) = self.effective_address(memory, ptr, offset) else { + return self.translate_trap(TrapCode::MemoryOutOfBounds); + }; + if let Ok(address) = Address32::try_from(address) { + return self.encode_istore_wrap_at::(memory, address, value); + } + // Case: we cannot use specialized encoding and thus have to fall back + // to the general case where `ptr` is zero and `offset` stores the + // `ptr+offset` address value. + let zero_ptr = self.layout.const_to_reg(0_u64)?; + (zero_ptr, u64::from(address)) + } + ptr => { + let ptr = self.layout.operand_to_reg(ptr)?; + (ptr, offset) + } + }; + if memory.is_default() { + if let Some(_instr) = self.encode_istore_wrap_mem0::(ptr, offset, value)? { + return Ok(()); + } + } + let (offset_hi, offset_lo) = Offset64::split(offset); + let (instr, param) = { + match value { + Operand::Immediate(value) => { + let value = value.val(); + match T::Param::try_from(T::Value::from(value).wrap()).ok() { + Some(value) => ( + T::store_imm(ptr, offset_lo), + Instruction::imm16_and_offset_hi(value, offset_hi), + ), + None => ( + T::store(ptr, offset_lo), + Instruction::register_and_offset_hi( + self.layout.const_to_reg(value)?, + offset_hi, + ), + ), + } + } + value => { + let value = self.layout.operand_to_reg(value)?; + ( + T::store(ptr, offset_lo), + Instruction::register_and_offset_hi(value, offset_hi), + ) + } + } + }; + self.push_instr(instr, FuelCostsProvider::store)?; + self.push_param(param)?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + /// Encodes a Wasm integer `store` and `storeN` instructions as Wasmi bytecode. + /// + /// # Note + /// + /// This is used in cases where the `ptr` is a known constant value. + fn encode_istore_wrap_at( + &mut self, + memory: index::Memory, + address: Address32, + value: Operand, + ) -> Result<(), Error> + where + T::Value: Copy + From + Wrap, + T::Param: TryFrom, + { + match value { + Operand::Immediate(value) => { + let value = value.val(); + let wrapped = T::Value::from(value).wrap(); + if let Ok(value) = T::Param::try_from(wrapped) { + self.push_instr(T::store_at_imm(value, address), FuelCostsProvider::store)?; + } else { + let value = self.layout.const_to_reg(value)?; + self.push_instr(T::store_at(value, address), FuelCostsProvider::store)?; + } + } + value => { + let value = self.layout.operand_to_reg(value)?; + self.push_instr(T::store_at(value, address), FuelCostsProvider::store)?; + } + } + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + /// Encodes a Wasm integer `store` and `storeN` instructions as Wasmi bytecode. + /// + /// # Note + /// + /// This optimizes for cases where the Wasm linear memory that is operated on is known + /// to be the default memory. + /// Returns `Some` in case the optimized instructions have been encoded. 
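+ /// Returns `None` if the 16-bit offset encoding cannot be used so that the caller falls back to the general encoding.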
+ fn encode_istore_wrap_mem0( + &mut self, + ptr: Reg, + offset: u64, + value: Operand, + ) -> Result, Error> + where + T::Value: Copy + From + Wrap, + T::Param: TryFrom, + { + let Ok(offset16) = Offset16::try_from(offset) else { + return Ok(None); + }; + let instr = match value { + Operand::Immediate(value) => { + let value = value.val(); + let wrapped = T::Value::from(value).wrap(); + match T::Param::try_from(wrapped) { + Ok(value) => self.push_instr( + T::store_offset16_imm(ptr, offset16, value), + FuelCostsProvider::store, + )?, + Err(_) => { + let value = self.layout.const_to_reg(value)?; + self.push_instr( + T::store_offset16(ptr, offset16, value), + FuelCostsProvider::store, + )? + } + } + } + value => { + let value = self.layout.operand_to_reg(value)?; + self.push_instr( + T::store_offset16(ptr, offset16, value), + FuelCostsProvider::store, + )? + } + }; + Ok(Some(instr)) + } + + /// Translates a general Wasm `store` instruction to Wasmi bytecode. + /// + /// # Note + /// + /// This chooses the most efficient encoding for the given `store` instruction. + /// If `ptr+offset` is a constant value the pointer address is pre-calculated. + /// + /// # Usage + /// + /// Used for translating the following Wasm operators to Wasmi bytecode: + /// + /// - `{f32, f64, v128}.store` + fn translate_store( + &mut self, + memarg: MemArg, + store: fn(ptr: Reg, offset_lo: Offset64Lo) -> Instruction, + store_offset16: fn(ptr: Reg, offset: Offset16, value: Reg) -> Instruction, + store_at: fn(value: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (memory, offset) = Self::decode_memarg(memarg); + let (ptr, value) = self.stack.pop2(); + let (ptr, offset) = match ptr { + Operand::Immediate(ptr) => { + let Some(address) = self.effective_address(memory, ptr.val(), offset) else { + return self.translate_trap(TrapCode::MemoryOutOfBounds); + }; + if let Ok(address) = Address32::try_from(address) { + return self.encode_fstore_at(memory, address, value, store_at); + } + let zero_ptr = self.layout.const_to_reg(0_u64)?; + (zero_ptr, u64::from(address)) + } + ptr => { + let ptr = self.layout.operand_to_reg(ptr)?; + (ptr, offset) + } + }; + let (offset_hi, offset_lo) = Offset64::split(offset); + let value = self.layout.operand_to_reg(value)?; + if memory.is_default() { + if let Ok(offset) = Offset16::try_from(offset) { + self.push_instr(store_offset16(ptr, offset, value), FuelCostsProvider::store)?; + return Ok(()); + } + } + self.push_instr(store(ptr, offset_lo), FuelCostsProvider::store)?; + self.push_param(Instruction::register_and_offset_hi(value, offset_hi))?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + /// Encodes a Wasm `store` instruction with immediate address as Wasmi bytecode. + /// + /// # Note + /// + /// This is used in cases where the `ptr` is a known constant value. + fn encode_fstore_at( + &mut self, + memory: index::Memory, + address: Address32, + value: Operand, + make_instr_at: fn(value: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> { + let value = self.layout.operand_to_reg(value)?; + self.push_instr(make_instr_at(value, address), FuelCostsProvider::store)?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + /// Returns the [`MemArg`] linear `memory` index and load/store `offset`. + /// + /// # Panics + /// + /// If the [`MemArg`] offset is not 32-bit. 
+ fn decode_memarg(memarg: MemArg) -> (index::Memory, u64) { + let memory = index::Memory::from(memarg.memory); + (memory, memarg.offset) + } + + /// Returns the effective address `ptr+offset` if it is valid. + fn effective_address(&self, mem: index::Memory, ptr: TypedVal, offset: u64) -> Option<Address>
{ + let memory_type = *self + .module + .get_type_of_memory(MemoryIdx::from(u32::from(mem))); + let ptr = match memory_type.is_64() { + true => u64::from(ptr), + false => u64::from(u32::from(ptr)), + }; + let Some(address) = ptr.checked_add(offset) else { + // Case: address overflows any legal memory index. + return None; + }; + if let Some(max) = memory_type.maximum() { + // The memory's maximum size in bytes. + let max_size = max << memory_type.page_size_log2(); + if address > max_size { + // Case: address overflows the memory's maximum size. + return None; + } + } + if !memory_type.is_64() && address >= 1 << 32 { + // Case: address overflows the 32-bit memory index. + return None; + } + let Ok(address) = Address::try_from(address) else { + // Case: address is too big for the system to handle properly. + return None; + }; + Some(address) + } + + /// Translates a Wasm `i64.binop128` instruction from the `wide-arithmetic` proposal. + fn translate_i64_binop128( + &mut self, + make_instr: fn(results: [Reg; 2], lhs_lo: Reg) -> Instruction, + const_eval: fn(lhs_lo: i64, lhs_hi: i64, rhs_lo: i64, rhs_hi: i64) -> (i64, i64), + ) -> Result<(), Error> { + bail_unreachable!(self); + let (rhs_lo, rhs_hi) = self.stack.pop2(); + let (lhs_lo, lhs_hi) = self.stack.pop2(); + if let ( + Operand::Immediate(lhs_lo), + Operand::Immediate(lhs_hi), + Operand::Immediate(rhs_lo), + Operand::Immediate(rhs_hi), + ) = (lhs_lo, lhs_hi, rhs_lo, rhs_hi) + { + let (result_lo, result_hi) = const_eval( + lhs_lo.val().into(), + lhs_hi.val().into(), + rhs_lo.val().into(), + rhs_hi.val().into(), + ); + self.stack.push_immediate(result_lo)?; + self.stack.push_immediate(result_hi)?; + return Ok(()); + } + let rhs_lo = self.layout.operand_to_reg(rhs_lo)?; + let rhs_hi = self.layout.operand_to_reg(rhs_hi)?; + let lhs_lo = self.layout.operand_to_reg(lhs_lo)?; + let lhs_hi = self.layout.operand_to_reg(lhs_hi)?; + let result_lo = self.stack.push_temp(ValType::I64, None)?; + let result_hi = self.stack.push_temp(ValType::I64, None)?; + let result_lo = self.layout.temp_to_reg(result_lo)?; + let result_hi = self.layout.temp_to_reg(result_hi)?; + self.push_instr( + make_instr([result_lo, result_hi], lhs_lo), + FuelCostsProvider::base, + )?; + self.push_param(Instruction::register3_ext(lhs_hi, rhs_lo, rhs_hi))?; + Ok(()) + } + + /// Translates a Wasm `i64.mul_wide_sx` instruction from the `wide-arithmetic` proposal. + fn translate_i64_mul_wide_sx( + &mut self, + make_instr: fn(results: FixedRegSpan<2>, lhs: Reg, rhs: Reg) -> Instruction, + const_eval: fn(lhs: i64, rhs: i64) -> (i64, i64), + signed: bool, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (lhs, rhs) = self.stack.pop2(); + let (lhs, rhs) = match (lhs, rhs) { + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => { + let (result_lo, result_hi) = const_eval(lhs.val().into(), rhs.val().into()); + self.stack.push_immediate(result_lo)?; + self.stack.push_immediate(result_hi)?; + return Ok(()); + } + (lhs, Operand::Immediate(rhs)) => { + let rhs = rhs.val(); + if self.try_opt_i64_mul_wide_sx(lhs, rhs, signed)? { + return Ok(()); + } + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.const_to_reg(rhs)?; + (lhs, rhs) + } + (Operand::Immediate(lhs), rhs) => { + let lhs = lhs.val(); + if self.try_opt_i64_mul_wide_sx(rhs, lhs, signed)? 
{ + return Ok(()); + } + let lhs = self.layout.const_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + (lhs, rhs) + } + (lhs, rhs) => { + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + (lhs, rhs) + } + }; + let result0 = self.stack.push_temp(ValType::I64, None)?; + let _result1 = self.stack.push_temp(ValType::I64, None)?; + let result0 = self.layout.temp_to_reg(result0)?; + let Ok(results) = <FixedRegSpan<2>>::new(RegSpan::new(result0)) else { + return Err(Error::from(TranslationError::AllocatedTooManyRegisters)); + }; + self.push_instr(make_instr(results, lhs, rhs), FuelCostsProvider::base)?; + Ok(()) + } + + /// Try to optimize a `i64.mul_wide_sx` instruction with one [`Reg`] and one immediate input. + /// + /// - Returns `Ok(true)` if the optimization was applied successfully. + /// - Returns `Ok(false)` if no optimization was applied. + fn try_opt_i64_mul_wide_sx( + &mut self, + lhs: Operand, + rhs: TypedVal, + signed: bool, + ) -> Result<bool, Error> { + let rhs = i64::from(rhs); + if rhs == 0 { + // Case: `mul(x, 0)` or `mul(0, x)` always evaluates to 0. + self.stack.push_immediate(0_i64)?; // lo-bits + self.stack.push_immediate(0_i64)?; // hi-bits + return Ok(true); + } + if rhs == 1 && !signed { + // Case: `mul(x, 1)` or `mul(1, x)` always evaluates to just `x`. + // This is only valid if `x` is not a signed (negative) value. + self.stack.push_operand(lhs)?; // lo-bits + self.stack.push_immediate(0_i64)?; // hi-bits + return Ok(true); + } + Ok(false) + } +} diff --git a/crates/wasmi/src/engine/translator/func2/op.rs b/crates/wasmi/src/engine/translator/func2/op.rs new file mode 100644 index 0000000000..2fa817e748 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/op.rs @@ -0,0 +1,160 @@ +use crate::ir::{Address32, Instruction, Offset16, Offset64Lo, Reg}; + +/// Trait implemented by all Wasm operators that can be translated as wrapping store instructions. +pub trait StoreWrapOperator { + /// The type of the value to be stored. + type Value; + /// The type of the wrapped value. + type Wrapped; + /// The type of the value as (at most) 16-bit encoded instruction parameter. + type Param; + + fn store(ptr: Reg, offset_lo: Offset64Lo) -> Instruction; + fn store_imm(ptr: Reg, offset_lo: Offset64Lo) -> Instruction; + fn store_offset16(ptr: Reg, offset: Offset16, value: Reg) -> Instruction; + fn store_offset16_imm(ptr: Reg, offset: Offset16, value: Self::Param) -> Instruction; + fn store_at(value: Reg, address: Address32) -> Instruction; + fn store_at_imm(value: Self::Param, address: Address32) -> Instruction; +} + +macro_rules!
impl_store_wrap { + ( $( + impl StoreWrapOperator for $name:ident { + type Value = $value_ty:ty; + type Wrapped = $wrapped_ty:ty; + type Param = $param_ty:ty; + + fn store = $store:expr; + fn store_imm = $store_imm:expr; + fn store_offset16 = $store_offset16:expr; + fn store_offset16_imm = $store_offset16_imm:expr; + fn store_at = $store_at:expr; + fn store_at_imm = $store_at_imm:expr; + } + )* ) => { + $( + pub enum $name {} + impl StoreWrapOperator for $name { + type Value = $value_ty; + type Wrapped = $wrapped_ty; + type Param = $param_ty; + + fn store(ptr: Reg, offset_lo: Offset64Lo) -> Instruction { + $store(ptr, offset_lo) + } + + fn store_imm(ptr: Reg, offset_lo: Offset64Lo) -> Instruction { + $store_imm(ptr, offset_lo) + } + + fn store_offset16(ptr: Reg, offset: Offset16, value: Reg) -> Instruction { + $store_offset16(ptr, offset, value) + } + + fn store_offset16_imm(ptr: Reg, offset: Offset16, value: Self::Param) -> Instruction { + $store_offset16_imm(ptr, offset, value) + } + + fn store_at(value: Reg, address: Address32) -> Instruction { + $store_at(value, address) + } + + fn store_at_imm(value: Self::Param, address: Address32) -> Instruction { + $store_at_imm(value, address) + } + } + )* + }; +} +impl_store_wrap! { + impl StoreWrapOperator for I32Store { + type Value = i32; + type Wrapped = i32; + type Param = i16; + + fn store = Instruction::store32; + fn store_imm = Instruction::i32_store_imm16; + fn store_offset16 = Instruction::store32_offset16; + fn store_offset16_imm = Instruction::i32_store_offset16_imm16; + fn store_at = Instruction::store32_at; + fn store_at_imm = Instruction::i32_store_at_imm16; + } + + impl StoreWrapOperator for I64Store { + type Value = i64; + type Wrapped = i64; + type Param = i16; + + fn store = Instruction::store64; + fn store_imm = Instruction::i64_store_imm16; + fn store_offset16 = Instruction::store64_offset16; + fn store_offset16_imm = Instruction::i64_store_offset16_imm16; + fn store_at = Instruction::store64_at; + fn store_at_imm = Instruction::i64_store_at_imm16; + } + + impl StoreWrapOperator for I32Store8 { + type Value = i32; + type Wrapped = i8; + type Param = i8; + + fn store = Instruction::i32_store8; + fn store_imm = Instruction::i32_store8_imm; + fn store_offset16 = Instruction::i32_store8_offset16; + fn store_offset16_imm = Instruction::i32_store8_offset16_imm; + fn store_at = Instruction::i32_store8_at; + fn store_at_imm = Instruction::i32_store8_at_imm; + } + + impl StoreWrapOperator for I32Store16 { + type Value = i32; + type Wrapped = i16; + type Param = i16; + + fn store = Instruction::i32_store16; + fn store_imm = Instruction::i32_store16_imm; + fn store_offset16 = Instruction::i32_store16_offset16; + fn store_offset16_imm = Instruction::i32_store16_offset16_imm; + fn store_at = Instruction::i32_store16_at; + fn store_at_imm = Instruction::i32_store16_at_imm; + } + + impl StoreWrapOperator for I64Store8 { + type Value = i64; + type Wrapped = i8; + type Param = i8; + + fn store = Instruction::i64_store8; + fn store_imm = Instruction::i64_store8_imm; + fn store_offset16 = Instruction::i64_store8_offset16; + fn store_offset16_imm = Instruction::i64_store8_offset16_imm; + fn store_at = Instruction::i64_store8_at; + fn store_at_imm = Instruction::i64_store8_at_imm; + } + + impl StoreWrapOperator for I64Store16 { + type Value = i64; + type Wrapped = i16; + type Param = i16; + + fn store = Instruction::i64_store16; + fn store_imm = Instruction::i64_store16_imm; + fn store_offset16 = Instruction::i64_store16_offset16; + fn 
store_offset16_imm = Instruction::i64_store16_offset16_imm; + fn store_at = Instruction::i64_store16_at; + fn store_at_imm = Instruction::i64_store16_at_imm; + } + + impl StoreWrapOperator for I64Store32 { + type Value = i64; + type Wrapped = i32; + type Param = i16; + + fn store = Instruction::i64_store32; + fn store_imm = Instruction::i64_store32_imm16; + fn store_offset16 = Instruction::i64_store32_offset16; + fn store_offset16_imm = Instruction::i64_store32_offset16_imm16; + fn store_at = Instruction::i64_store32_at; + fn store_at_imm = Instruction::i64_store32_at_imm16; + } +} diff --git a/crates/wasmi/src/engine/translator/func2/simd/mod.rs b/crates/wasmi/src/engine/translator/func2/simd/mod.rs new file mode 100644 index 0000000000..22b87ca045 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/simd/mod.rs @@ -0,0 +1,387 @@ +use super::FuncTranslator; + +mod visit; + +use crate::{ + core::{simd::IntoLaneIdx, FuelCostsProvider, TrapCode, Typed, TypedVal, ValType, V128}, + engine::translator::{ + func2::Operand, + utils::{Instr, Wrap}, + }, + ir::{ + index, + index::Memory, + Address32, + Instruction, + IntoShiftAmount, + Offset64, + Offset64Lo, + Offset8, + Reg, + }, + Error, +}; +use wasmparser::MemArg; + +impl FuncTranslator { + /// Generically translate any of the Wasm `simd` splat instructions. + fn translate_simd_splat( + &mut self, + make_instr: fn(result: Reg, value: Reg) -> Instruction, + const_eval: fn(Wrapped) -> V128, + ) -> Result<(), Error> + where + T: From + Wrap, + { + bail_unreachable!(self); + let value = self.stack.pop(); + if let Operand::Immediate(value) = value { + let value = T::from(value.val()).wrap(); + let result = const_eval(value); + self.stack.push_immediate(result)?; + return Ok(()); + }; + let value = self.layout.operand_to_reg(value)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr(result, value), + FuelCostsProvider::simd, + )?; + Ok(()) + } + + /// Generically translate any of the Wasm `simd` extract lane instructions. + fn translate_extract_lane( + &mut self, + lane: u8, + make_instr: fn(result: Reg, input: Reg, lane: T::LaneIdx) -> Instruction, + const_eval: fn(input: V128, lane: T::LaneIdx) -> R, + ) -> Result<(), Error> + where + R: Into + Typed, + { + bail_unreachable!(self); + let Ok(lane) = ::try_from(lane) else { + panic!("encountered out of bounds lane index: {lane}") + }; + let input = self.stack.pop(); + if let Operand::Immediate(input) = input { + let result = const_eval(input.val().into(), lane); + self.stack.push_immediate(result)?; + return Ok(()); + }; + let input = self.layout.operand_to_reg(input)?; + self.push_instr_with_result( + ::TY, + |result| make_instr(result, input, lane), + FuelCostsProvider::simd, + )?; + Ok(()) + } + + /// Generically translate a Wasm unary instruction. + fn translate_simd_unary( + &mut self, + make_instr: fn(result: Reg, input: Reg) -> Instruction, + const_eval: fn(input: V128) -> T, + ) -> Result<(), Error> + where + T: Into, + { + bail_unreachable!(self); + let input = self.stack.pop(); + if let Operand::Immediate(input) = input { + // Case: the input is an immediate so we can const-eval the result. + let result = const_eval(input.val().into()); + self.stack.push_immediate(result)?; + return Ok(()); + }; + let input = self.layout.operand_to_reg(input)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr(result, input), + FuelCostsProvider::simd, + )?; + Ok(()) + } + + /// Generically translate a Wasm binary instruction. 
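+ /// If both inputs are immediate values the result is evaluated at translation time.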
+ fn translate_simd_binary( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + const_eval: fn(lhs: V128, rhs: V128) -> V128, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (lhs, rhs) = self.stack.pop2(); + if let (Operand::Immediate(lhs), Operand::Immediate(rhs)) = (lhs, rhs) { + // Case: both inputs are immediates so we can const-eval the result. + let result = const_eval(lhs.val().into(), rhs.val().into()); + self.stack.push_immediate(result)?; + return Ok(()); + } + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr(result, lhs, rhs), + FuelCostsProvider::simd, + )?; + Ok(()) + } + + /// Generically translate a Wasm ternary instruction. + fn translate_simd_ternary( + &mut self, + make_instr: fn(result: Reg, a: Reg, b: Reg) -> Instruction, + const_eval: fn(lhas: V128, b: V128, c: V128) -> V128, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (a, b, c) = self.stack.pop3(); + if let (Operand::Immediate(lhs), Operand::Immediate(rhs), Operand::Immediate(c)) = (a, b, c) + { + // Case: all inputs are immediates so we can const-eval the result. + let result = const_eval(lhs.val().into(), rhs.val().into(), c.val().into()); + self.stack.push_immediate(result)?; + return Ok(()); + } + let lhs = self.layout.operand_to_reg(a)?; + let rhs = self.layout.operand_to_reg(b)?; + let selector = self.layout.operand_to_reg(c)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr(result, lhs, rhs), + FuelCostsProvider::simd, + )?; + self.push_param(Instruction::register(selector))?; + Ok(()) + } + + /// Generically translate a Wasm SIMD shift instruction. + fn translate_simd_shift( + &mut self, + make_instr: fn(result: Reg, lhs: Reg, rhs: Reg) -> Instruction, + make_instr_imm: fn( + result: Reg, + lhs: Reg, + rhs: ::Output, + ) -> Instruction, + const_eval: fn(lhs: V128, rhs: u32) -> V128, + ) -> Result<(), Error> + where + T: IntoShiftAmount>, + { + bail_unreachable!(self); + let (lhs, rhs) = self.stack.pop2(); + if let (Operand::Immediate(lhs), Operand::Immediate(rhs)) = (lhs, rhs) { + // Case: both inputs are immediates so we can const-eval the result. 
+ let result = const_eval(lhs.val().into(), rhs.val().into()); + self.stack.push_immediate(result)?; + return Ok(()); + } + if let Operand::Immediate(rhs) = rhs { + let Some(rhs) = T::into_shift_amount(rhs.val().into()) else { + // Case: the shift operation is a no-op + self.stack.push_operand(lhs)?; + return Ok(()); + }; + let lhs = self.layout.operand_to_reg(lhs)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr_imm(result, lhs, rhs), + FuelCostsProvider::simd, + )?; + return Ok(()); + } + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + self.push_instr_with_result( + ValType::V128, + |result| make_instr(result, lhs, rhs), + FuelCostsProvider::simd, + )?; + Ok(()) + } + + fn translate_v128_load_lane( + &mut self, + memarg: MemArg, + lane: u8, + make_instr: fn(result: Reg, offset_lo: Offset64Lo) -> Instruction, + make_instr_at: fn(result: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> { + bail_unreachable!(self); + let (memory, offset) = Self::decode_memarg(memarg); + let Ok(lane) = ::try_from(lane) else { + panic!("encountered out of bounds lane: {lane}"); + }; + let (ptr, x) = self.stack.pop2(); + let x = self.layout.operand_to_reg(x)?; + let (ptr, offset) = match ptr { + Operand::Immediate(ptr) => { + let Some(address) = self.effective_address(memory, ptr.val(), offset) else { + return self.translate_trap(TrapCode::MemoryOutOfBounds); + }; + if let Ok(address) = Address32::try_from(address) { + return self.translate_v128_load_lane_at::( + memory, + x, + lane, + address, + make_instr_at, + ); + } + let zero_ptr = self.layout.const_to_reg(0_u64)?; + (zero_ptr, u64::from(address)) + } + ptr => { + let ptr = self.layout.operand_to_reg(ptr)?; + (ptr, offset) + } + }; + let (offset_hi, offset_lo) = Offset64::split(offset); + self.push_instr_with_result( + ::TY, + |result| make_instr(result, offset_lo), + FuelCostsProvider::load, + )?; + self.push_param(Instruction::register_and_offset_hi(ptr, offset_hi))?; + self.push_param(Instruction::register_and_lane(x, lane))?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + fn translate_v128_load_lane_at( + &mut self, + memory: Memory, + x: Reg, + lane: LaneType, + address: Address32, + make_instr_at: fn(result: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> + where + LaneType: Into, + { + self.push_instr_with_result( + ::TY, + |result| make_instr_at(result, address), + FuelCostsProvider::load, + )?; + self.push_param(Instruction::register_and_lane(x, lane))?; + if !memory.is_default() { + self.push_param(Instruction::memory_index(memory))?; + } + Ok(()) + } + + #[allow(clippy::type_complexity)] + fn translate_v128_store_lane( + &mut self, + memarg: MemArg, + lane: u8, + make_instr: fn(ptr: Reg, offset_lo: Offset64Lo) -> Instruction, + make_instr_offset8: fn( + ptr: Reg, + value: Reg, + offset: Offset8, + lane: T::LaneIdx, + ) -> Instruction, + make_instr_at: fn(value: Reg, address: Address32) -> Instruction, + translate_imm: fn( + &mut Self, + memarg: MemArg, + ptr: Operand, + lane: T::LaneIdx, + value: V128, + ) -> Result<(), Error>, + ) -> Result<(), Error> { + bail_unreachable!(self); + let Ok(lane) = ::try_from(lane) else { + panic!("encountered out of bounds lane index: {lane}"); + }; + let (ptr, v128) = self.stack.pop2(); + let v128 = match v128 { + Operand::Immediate(v128) => { + // Case: with `v128` being an immediate value we can extract its + // lane value and translate as a more 
efficient non-SIMD operation. + return translate_imm(self, memarg, ptr, lane, V128::from(v128.val())); + } + v128 => self.layout.operand_to_reg(v128)?, + }; + let (memory, offset) = Self::decode_memarg(memarg); + let (ptr, offset) = match ptr { + Operand::Immediate(ptr) => { + let Some(address) = self.effective_address(memory, ptr.val(), offset) else { + return self.translate_trap(TrapCode::MemoryOutOfBounds); + }; + if let Ok(address) = Address32::try_from(address) { + return self.translate_v128_store_lane_at::( + memory, + address, + v128, + lane, + make_instr_at, + ); + } + // Case: we cannot use specialized encoding and thus have to fall back + // to the general case where `ptr` is zero and `offset` stores the + // `ptr+offset` address value. + let zero_ptr = self.layout.const_to_reg(0_u64)?; + (zero_ptr, u64::from(address)) + } + ptr => { + let ptr = self.layout.operand_to_reg(ptr)?; + (ptr, offset) + } + }; + if let Ok(Some(_)) = + self.translate_v128_store_lane_mem0(memory, ptr, offset, v128, lane, make_instr_offset8) + { + return Ok(()); + } + let (offset_hi, offset_lo) = Offset64::split(offset); + let instr = make_instr(ptr, offset_lo); + let param = Instruction::register_and_offset_hi(v128, offset_hi); + let param2 = Instruction::lane_and_memory_index(lane, memory); + self.push_instr(instr, FuelCostsProvider::store)?; + self.push_param(param)?; + self.push_param(param2)?; + Ok(()) + } + + fn translate_v128_store_lane_at( + &mut self, + memory: index::Memory, + address: Address32, + value: Reg, + lane: T::LaneIdx, + make_instr_at: fn(value: Reg, address: Address32) -> Instruction, + ) -> Result<(), Error> { + self.push_instr(make_instr_at(value, address), FuelCostsProvider::store)?; + self.push_param(Instruction::lane_and_memory_index(lane, memory))?; + Ok(()) + } + + fn translate_v128_store_lane_mem0( + &mut self, + memory: Memory, + ptr: Reg, + offset: u64, + value: Reg, + lane: LaneType, + make_instr_offset8: fn(Reg, Reg, Offset8, LaneType) -> Instruction, + ) -> Result, Error> { + if !memory.is_default() { + return Ok(None); + } + let Ok(offset8) = Offset8::try_from(offset) else { + return Ok(None); + }; + let instr = self.push_instr( + make_instr_offset8(ptr, value, offset8, lane), + FuelCostsProvider::store, + )?; + Ok(Some(instr)) + } +} diff --git a/crates/wasmi/src/engine/translator/func2/simd/visit.rs b/crates/wasmi/src/engine/translator/func2/simd/visit.rs new file mode 100644 index 0000000000..422673c2e5 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/simd/visit.rs @@ -0,0 +1,1423 @@ +use super::FuncTranslator; +use crate::{ + core::{simd, simd::ImmLaneIdx32, FuelCostsProvider, ValType, V128}, + engine::translator::func2::Operand, + ir::{Instruction, Reg}, +}; +use core::array; +use wasmparser::{MemArg, VisitSimdOperator}; + +/// Used to swap operands of binary [`Instruction`] constructor. +macro_rules! 
swap_ops { + ($fn_name:path) => { + |result: Reg, lhs, rhs| -> Instruction { $fn_name(result, rhs, lhs) } + }; +} + +impl VisitSimdOperator<'_> for FuncTranslator { + fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load, + Instruction::v128_load_offset16, + Instruction::v128_load_at, + ) + } + + fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load8x8_s, + Instruction::v128_load8x8_s_offset16, + Instruction::v128_load8x8_s_at, + ) + } + + fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load8x8_u, + Instruction::v128_load8x8_u_offset16, + Instruction::v128_load8x8_u_at, + ) + } + + fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load16x4_s, + Instruction::v128_load16x4_s_offset16, + Instruction::v128_load16x4_s_at, + ) + } + + fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load16x4_u, + Instruction::v128_load16x4_u_offset16, + Instruction::v128_load16x4_u_at, + ) + } + + fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load32x2_s, + Instruction::v128_load32x2_s_offset16, + Instruction::v128_load32x2_s_at, + ) + } + + fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load32x2_u, + Instruction::v128_load32x2_u_offset16, + Instruction::v128_load32x2_u_at, + ) + } + + fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load8_splat, + Instruction::v128_load8_splat_offset16, + Instruction::v128_load8_splat_at, + ) + } + + fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load16_splat, + Instruction::v128_load16_splat_offset16, + Instruction::v128_load16_splat_at, + ) + } + + fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load32_splat, + Instruction::v128_load32_splat_offset16, + Instruction::v128_load32_splat_at, + ) + } + + fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load64_splat, + Instruction::v128_load64_splat_offset16, + Instruction::v128_load64_splat_at, + ) + } + + fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load32_zero, + Instruction::v128_load32_zero_offset16, + Instruction::v128_load32_zero_at, + ) + } + + fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::V128, + Instruction::v128_load64_zero, + Instruction::v128_load64_zero_offset16, + Instruction::v128_load64_zero_at, + ) + } + + fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output { + self.translate_store( + memarg, + Instruction::v128_store, + Instruction::v128_store_offset16, + Instruction::v128_store_at, + ) + } + + fn visit_v128_load8_lane(&mut self, memarg: MemArg, lane: 
u8) -> Self::Output { + self.translate_v128_load_lane::( + memarg, + lane, + Instruction::v128_load8_lane, + Instruction::v128_load8_lane_at, + ) + } + + fn visit_v128_load16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_load_lane::( + memarg, + lane, + Instruction::v128_load16_lane, + Instruction::v128_load16_lane_at, + ) + } + + fn visit_v128_load32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_load_lane::( + memarg, + lane, + Instruction::v128_load32_lane, + Instruction::v128_load32_lane_at, + ) + } + + fn visit_v128_load64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_load_lane::( + memarg, + lane, + Instruction::v128_load64_lane, + Instruction::v128_load64_lane_at, + ) + } + + fn visit_v128_store8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_store_lane::( + memarg, + lane, + Instruction::v128_store8_lane, + Instruction::v128_store8_lane_offset8, + Instruction::v128_store8_lane_at, + |_this, _memarg, _ptr, lane, v128| { + let _value = simd::i8x16_extract_lane_s(v128, lane); + todo!() + }, + ) + } + + fn visit_v128_store16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_store_lane::( + memarg, + lane, + Instruction::v128_store16_lane, + Instruction::v128_store16_lane_offset8, + Instruction::v128_store16_lane_at, + |_this, _memarg, _ptr, lane, v128| { + let _value = simd::i16x8_extract_lane_s(v128, lane); + todo!() + }, + ) + } + + fn visit_v128_store32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_store_lane::( + memarg, + lane, + Instruction::v128_store32_lane, + Instruction::v128_store32_lane_offset8, + Instruction::v128_store32_lane_at, + |_this, _memarg, _ptr, lane, v128| { + let _value = simd::i32x4_extract_lane(v128, lane); + todo!() + }, + ) + } + + fn visit_v128_store64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output { + self.translate_v128_store_lane::( + memarg, + lane, + Instruction::v128_store64_lane, + Instruction::v128_store64_lane_offset8, + Instruction::v128_store64_lane_at, + |_this, _memarg, _ptr, lane, v128| { + let _value = simd::i64x2_extract_lane(v128, lane); + todo!() + }, + ) + } + + fn visit_v128_const(&mut self, value: wasmparser::V128) -> Self::Output { + bail_unreachable!(self); + let v128 = V128::from(value.i128() as u128); + self.stack.push_immediate(v128)?; + Ok(()) + } + + fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output { + bail_unreachable!(self); + let selector: [ImmLaneIdx32; 16] = array::from_fn(|i| { + let Ok(lane) = ImmLaneIdx32::try_from(lanes[i]) else { + panic!("encountered out of bounds lane at index {i}: {}", lanes[i]) + }; + lane + }); + let (lhs, rhs) = self.stack.pop2(); + if let (Operand::Immediate(lhs), Operand::Immediate(rhs)) = (lhs, rhs) { + let result = simd::i8x16_shuffle(lhs.val().into(), rhs.val().into(), selector); + self.stack.push_immediate(result)?; + return Ok(()); + } + let lhs = self.layout.operand_to_reg(lhs)?; + let rhs = self.layout.operand_to_reg(rhs)?; + let selector = self + .layout + .const_to_reg(V128::from(u128::from_ne_bytes(lanes)))?; + self.push_instr_with_result( + ValType::V128, + |result| Instruction::i8x16_shuffle(result, lhs, rhs), + FuelCostsProvider::simd, + )?; + self.push_param(Instruction::register(selector))?; + Ok(()) + } + + fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + 
Instruction::i8x16_extract_lane_s, + simd::i8x16_extract_lane_s, + ) + } + + fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::i8x16_extract_lane_u, + simd::i8x16_extract_lane_u, + ) + } + + fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::i16x8_extract_lane_s, + simd::i16x8_extract_lane_s, + ) + } + + fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::i16x8_extract_lane_u, + simd::i16x8_extract_lane_u, + ) + } + + fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::i32x4_extract_lane, + simd::i32x4_extract_lane, + ) + } + + fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::i64x2_extract_lane, + simd::i64x2_extract_lane, + ) + } + + fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::f32x4_extract_lane, + simd::f32x4_extract_lane, + ) + } + + fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output { + self.translate_extract_lane::( + lane, + Instruction::f64x2_extract_lane, + simd::f64x2_extract_lane, + ) + } + + fn visit_i8x16_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_i16x8_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_i32x4_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_i64x2_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_f32x4_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_f64x2_replace_lane(&mut self, _lane: u8) -> Self::Output { + todo!() + } + + fn visit_i8x16_swizzle(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_swizzle, simd::i8x16_swizzle) + } + + fn visit_i8x16_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::i8x16_splat, simd::i8x16_splat) + } + + fn visit_i16x8_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::i16x8_splat, simd::i16x8_splat) + } + + fn visit_i32x4_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::i32x4_splat, simd::i32x4_splat) + } + + fn visit_i64x2_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::i64x2_splat, simd::i64x2_splat) + } + + fn visit_f32x4_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::f32x4_splat, simd::f32x4_splat) + } + + fn visit_f64x2_splat(&mut self) -> Self::Output { + self.translate_simd_splat::(Instruction::f64x2_splat, simd::f64x2_splat) + } + + fn visit_i8x16_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_eq, simd::i8x16_eq) + } + + fn visit_i8x16_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_ne, simd::i8x16_ne) + } + + fn visit_i8x16_lt_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_lt_s, simd::i8x16_lt_s) + } + + fn visit_i8x16_lt_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_lt_u, simd::i8x16_lt_u) + } + + fn visit_i8x16_gt_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i8x16_lt_s), simd::i8x16_gt_s) + } + + fn visit_i8x16_gt_u(&mut self) -> Self::Output { + 
self.translate_simd_binary(swap_ops!(Instruction::i8x16_lt_u), simd::i8x16_gt_u) + } + + fn visit_i8x16_le_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_le_s, simd::i8x16_le_s) + } + + fn visit_i8x16_le_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_le_u, simd::i8x16_le_u) + } + + fn visit_i8x16_ge_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i8x16_le_s), simd::i8x16_ge_s) + } + + fn visit_i8x16_ge_u(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i8x16_le_u), simd::i8x16_ge_u) + } + + fn visit_i16x8_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_eq, simd::i16x8_eq) + } + + fn visit_i16x8_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_ne, simd::i16x8_ne) + } + + fn visit_i16x8_lt_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_lt_s, simd::i16x8_lt_s) + } + + fn visit_i16x8_lt_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_lt_u, simd::i16x8_lt_u) + } + + fn visit_i16x8_gt_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i16x8_lt_s), simd::i16x8_gt_s) + } + + fn visit_i16x8_gt_u(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i16x8_lt_u), simd::i16x8_gt_u) + } + + fn visit_i16x8_le_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_le_s, simd::i16x8_le_s) + } + + fn visit_i16x8_le_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_le_u, simd::i16x8_le_u) + } + + fn visit_i16x8_ge_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i16x8_le_s), simd::i16x8_ge_s) + } + + fn visit_i16x8_ge_u(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i16x8_le_u), simd::i16x8_ge_u) + } + + fn visit_i32x4_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_eq, simd::i32x4_eq) + } + + fn visit_i32x4_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_ne, simd::i32x4_ne) + } + + fn visit_i32x4_lt_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_lt_s, simd::i32x4_lt_s) + } + + fn visit_i32x4_lt_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_lt_u, simd::i32x4_lt_u) + } + + fn visit_i32x4_gt_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i32x4_lt_s), simd::i32x4_gt_s) + } + + fn visit_i32x4_gt_u(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i32x4_lt_u), simd::i32x4_gt_u) + } + + fn visit_i32x4_le_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_le_s, simd::i32x4_le_s) + } + + fn visit_i32x4_le_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_le_u, simd::i32x4_le_u) + } + + fn visit_i32x4_ge_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i32x4_le_s), simd::i32x4_ge_s) + } + + fn visit_i32x4_ge_u(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i32x4_le_u), simd::i32x4_ge_u) + } + + fn visit_i64x2_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_eq, simd::i64x2_eq) + } + + fn visit_i64x2_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_ne, simd::i64x2_ne) + } + + fn visit_i64x2_lt_s(&mut self) -> 
Self::Output { + self.translate_simd_binary(Instruction::i64x2_lt_s, simd::i64x2_lt_s) + } + + fn visit_i64x2_gt_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i64x2_lt_s), simd::i64x2_gt_s) + } + + fn visit_i64x2_le_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_le_s, simd::i64x2_le_s) + } + + fn visit_i64x2_ge_s(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::i64x2_le_s), simd::i64x2_ge_s) + } + + fn visit_f32x4_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_eq, simd::f32x4_eq) + } + + fn visit_f32x4_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_ne, simd::f32x4_ne) + } + + fn visit_f32x4_lt(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_lt, simd::f32x4_lt) + } + + fn visit_f32x4_gt(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::f32x4_lt), simd::f32x4_gt) + } + + fn visit_f32x4_le(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_le, simd::f32x4_le) + } + + fn visit_f32x4_ge(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::f32x4_le), simd::f32x4_ge) + } + + fn visit_f64x2_eq(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_eq, simd::f64x2_eq) + } + + fn visit_f64x2_ne(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_ne, simd::f64x2_ne) + } + + fn visit_f64x2_lt(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_lt, simd::f64x2_lt) + } + + fn visit_f64x2_gt(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::f64x2_lt), simd::f64x2_gt) + } + + fn visit_f64x2_le(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_le, simd::f64x2_le) + } + + fn visit_f64x2_ge(&mut self) -> Self::Output { + self.translate_simd_binary(swap_ops!(Instruction::f64x2_le), simd::f64x2_ge) + } + + fn visit_v128_not(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::v128_not, simd::v128_not) + } + + fn visit_v128_and(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::v128_and, simd::v128_and) + } + + fn visit_v128_andnot(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::v128_andnot, simd::v128_andnot) + } + + fn visit_v128_or(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::v128_or, simd::v128_or) + } + + fn visit_v128_xor(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::v128_xor, simd::v128_xor) + } + + fn visit_v128_bitselect(&mut self) -> Self::Output { + self.translate_simd_ternary(Instruction::v128_bitselect, simd::v128_bitselect) + } + + fn visit_v128_any_true(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::v128_any_true, simd::v128_any_true) + } + + fn visit_i8x16_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i8x16_abs, simd::i8x16_abs) + } + + fn visit_i8x16_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i8x16_neg, simd::i8x16_neg) + } + + fn visit_i8x16_popcnt(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i8x16_popcnt, simd::i8x16_popcnt) + } + + fn visit_i8x16_all_true(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i8x16_all_true, simd::i8x16_all_true) + } + + fn visit_i8x16_bitmask(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i8x16_bitmask, 
simd::i8x16_bitmask) + } + + fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i8x16_narrow_i16x8_s, + simd::i8x16_narrow_i16x8_s, + ) + } + + fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i8x16_narrow_i16x8_u, + simd::i8x16_narrow_i16x8_u, + ) + } + + fn visit_i8x16_shl(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i8x16_shl, + Instruction::i8x16_shl_by, + simd::i8x16_shl, + ) + } + + fn visit_i8x16_shr_s(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i8x16_shr_s, + Instruction::i8x16_shr_s_by, + simd::i8x16_shr_s, + ) + } + + fn visit_i8x16_shr_u(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i8x16_shr_u, + Instruction::i8x16_shr_u_by, + simd::i8x16_shr_u, + ) + } + + fn visit_i8x16_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_add, simd::i8x16_add) + } + + fn visit_i8x16_add_sat_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_add_sat_s, simd::i8x16_add_sat_s) + } + + fn visit_i8x16_add_sat_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_add_sat_u, simd::i8x16_add_sat_u) + } + + fn visit_i8x16_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_sub, simd::i8x16_sub) + } + + fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_sub_sat_s, simd::i8x16_sub_sat_s) + } + + fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_sub_sat_u, simd::i8x16_sub_sat_u) + } + + fn visit_i8x16_min_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_min_s, simd::i8x16_min_s) + } + + fn visit_i8x16_min_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_min_u, simd::i8x16_min_u) + } + + fn visit_i8x16_max_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_max_s, simd::i8x16_max_s) + } + + fn visit_i8x16_max_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_max_u, simd::i8x16_max_u) + } + + fn visit_i8x16_avgr_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i8x16_avgr_u, simd::i8x16_avgr_u) + } + + fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extadd_pairwise_i8x16_s, + simd::i16x8_extadd_pairwise_i8x16_s, + ) + } + + fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extadd_pairwise_i8x16_u, + simd::i16x8_extadd_pairwise_i8x16_u, + ) + } + + fn visit_i16x8_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i16x8_abs, simd::i16x8_abs) + } + + fn visit_i16x8_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i16x8_neg, simd::i16x8_neg) + } + + fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_q15mulr_sat_s, simd::i16x8_q15mulr_sat_s) + } + + fn visit_i16x8_all_true(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i16x8_all_true, simd::i16x8_all_true) + } + + fn visit_i16x8_bitmask(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i16x8_bitmask, simd::i16x8_bitmask) + } + + fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_binary( + 
Instruction::i16x8_narrow_i32x4_s, + simd::i16x8_narrow_i32x4_s, + ) + } + + fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i16x8_narrow_i32x4_u, + simd::i16x8_narrow_i32x4_u, + ) + } + + fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extend_low_i8x16_s, + simd::i16x8_extend_low_i8x16_s, + ) + } + + fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extend_high_i8x16_s, + simd::i16x8_extend_high_i8x16_s, + ) + } + + fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extend_low_i8x16_u, + simd::i16x8_extend_low_i8x16_u, + ) + } + + fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i16x8_extend_high_i8x16_u, + simd::i16x8_extend_high_i8x16_u, + ) + } + + fn visit_i16x8_shl(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i16x8_shl, + Instruction::i16x8_shl_by, + simd::i16x8_shl, + ) + } + + fn visit_i16x8_shr_s(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i16x8_shr_s, + Instruction::i16x8_shr_s_by, + simd::i16x8_shr_s, + ) + } + + fn visit_i16x8_shr_u(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i16x8_shr_u, + Instruction::i16x8_shr_u_by, + simd::i16x8_shr_u, + ) + } + + fn visit_i16x8_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_add, simd::i16x8_add) + } + + fn visit_i16x8_add_sat_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_add_sat_s, simd::i16x8_add_sat_s) + } + + fn visit_i16x8_add_sat_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_add_sat_u, simd::i16x8_add_sat_u) + } + + fn visit_i16x8_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_sub, simd::i16x8_sub) + } + + fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_sub_sat_s, simd::i16x8_sub_sat_s) + } + + fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_sub_sat_u, simd::i16x8_sub_sat_u) + } + + fn visit_i16x8_mul(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_mul, simd::i16x8_mul) + } + + fn visit_i16x8_min_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_min_s, simd::i16x8_min_s) + } + + fn visit_i16x8_min_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_min_u, simd::i16x8_min_u) + } + + fn visit_i16x8_max_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_max_s, simd::i16x8_max_s) + } + + fn visit_i16x8_max_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_max_u, simd::i16x8_max_u) + } + + fn visit_i16x8_avgr_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i16x8_avgr_u, simd::i16x8_avgr_u) + } + + fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i16x8_extmul_low_i8x16_s, + simd::i16x8_extmul_low_i8x16_s, + ) + } + + fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i16x8_extmul_high_i8x16_s, + simd::i16x8_extmul_high_i8x16_s, + ) + } + + fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output { + self.translate_simd_binary( + 
Instruction::i16x8_extmul_low_i8x16_u, + simd::i16x8_extmul_low_i8x16_u, + ) + } + + fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i16x8_extmul_high_i8x16_u, + simd::i16x8_extmul_high_i8x16_u, + ) + } + + fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extadd_pairwise_i16x8_s, + simd::i32x4_extadd_pairwise_i16x8_s, + ) + } + + fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extadd_pairwise_i16x8_u, + simd::i32x4_extadd_pairwise_i16x8_u, + ) + } + + fn visit_i32x4_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i32x4_abs, simd::i32x4_abs) + } + + fn visit_i32x4_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i32x4_neg, simd::i32x4_neg) + } + + fn visit_i32x4_all_true(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i32x4_all_true, simd::i32x4_all_true) + } + + fn visit_i32x4_bitmask(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i32x4_bitmask, simd::i32x4_bitmask) + } + + fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extend_low_i16x8_s, + simd::i32x4_extend_low_i16x8_s, + ) + } + + fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extend_high_i16x8_s, + simd::i32x4_extend_high_i16x8_s, + ) + } + + fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extend_low_i16x8_u, + simd::i32x4_extend_low_i16x8_u, + ) + } + + fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_extend_high_i16x8_u, + simd::i32x4_extend_high_i16x8_u, + ) + } + + fn visit_i32x4_shl(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i32x4_shl, + Instruction::i32x4_shl_by, + simd::i32x4_shl, + ) + } + + fn visit_i32x4_shr_s(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i32x4_shr_s, + Instruction::i32x4_shr_s_by, + simd::i32x4_shr_s, + ) + } + + fn visit_i32x4_shr_u(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i32x4_shr_u, + Instruction::i32x4_shr_u_by, + simd::i32x4_shr_u, + ) + } + + fn visit_i32x4_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_add, simd::i32x4_add) + } + + fn visit_i32x4_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_sub, simd::i32x4_sub) + } + + fn visit_i32x4_mul(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_mul, simd::i32x4_mul) + } + + fn visit_i32x4_min_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_min_s, simd::i32x4_min_s) + } + + fn visit_i32x4_min_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_min_u, simd::i32x4_min_u) + } + + fn visit_i32x4_max_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_max_s, simd::i32x4_max_s) + } + + fn visit_i32x4_max_u(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_max_u, simd::i32x4_max_u) + } + + fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i32x4_dot_i16x8_s, simd::i32x4_dot_i16x8_s) + } + + fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output { + 
self.translate_simd_binary( + Instruction::i32x4_extmul_low_i16x8_s, + simd::i32x4_extmul_low_i16x8_s, + ) + } + + fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i32x4_extmul_high_i16x8_s, + simd::i32x4_extmul_high_i16x8_s, + ) + } + + fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i32x4_extmul_low_i16x8_u, + simd::i32x4_extmul_low_i16x8_u, + ) + } + + fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i32x4_extmul_high_i16x8_u, + simd::i32x4_extmul_high_i16x8_u, + ) + } + + fn visit_i64x2_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i64x2_abs, simd::i64x2_abs) + } + + fn visit_i64x2_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i64x2_neg, simd::i64x2_neg) + } + + fn visit_i64x2_all_true(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i64x2_all_true, simd::i64x2_all_true) + } + + fn visit_i64x2_bitmask(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::i64x2_bitmask, simd::i64x2_bitmask) + } + + fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i64x2_extend_low_i32x4_s, + simd::i64x2_extend_low_i32x4_s, + ) + } + + fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i64x2_extend_high_i32x4_s, + simd::i64x2_extend_high_i32x4_s, + ) + } + + fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i64x2_extend_low_i32x4_u, + simd::i64x2_extend_low_i32x4_u, + ) + } + + fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i64x2_extend_high_i32x4_u, + simd::i64x2_extend_high_i32x4_u, + ) + } + + fn visit_i64x2_shl(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i64x2_shl, + Instruction::i64x2_shl_by, + simd::i64x2_shl, + ) + } + + fn visit_i64x2_shr_s(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i64x2_shr_s, + Instruction::i64x2_shr_s_by, + simd::i64x2_shr_s, + ) + } + + fn visit_i64x2_shr_u(&mut self) -> Self::Output { + self.translate_simd_shift::( + Instruction::i64x2_shr_u, + Instruction::i64x2_shr_u_by, + simd::i64x2_shr_u, + ) + } + + fn visit_i64x2_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_add, simd::i64x2_add) + } + + fn visit_i64x2_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_sub, simd::i64x2_sub) + } + + fn visit_i64x2_mul(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::i64x2_mul, simd::i64x2_mul) + } + + fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i64x2_extmul_low_i32x4_s, + simd::i64x2_extmul_low_i32x4_s, + ) + } + + fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i64x2_extmul_high_i32x4_s, + simd::i64x2_extmul_high_i32x4_s, + ) + } + + fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i64x2_extmul_low_i32x4_u, + simd::i64x2_extmul_low_i32x4_u, + ) + } + + fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i64x2_extmul_high_i32x4_u, + simd::i64x2_extmul_high_i32x4_u, + ) + } + + fn visit_f32x4_ceil(&mut 
self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_ceil, simd::f32x4_ceil) + } + + fn visit_f32x4_floor(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_floor, simd::f32x4_floor) + } + + fn visit_f32x4_trunc(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_trunc, simd::f32x4_trunc) + } + + fn visit_f32x4_nearest(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_nearest, simd::f32x4_nearest) + } + + fn visit_f32x4_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_abs, simd::f32x4_abs) + } + + fn visit_f32x4_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_neg, simd::f32x4_neg) + } + + fn visit_f32x4_sqrt(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f32x4_sqrt, simd::f32x4_sqrt) + } + + fn visit_f32x4_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_add, simd::f32x4_add) + } + + fn visit_f32x4_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_sub, simd::f32x4_sub) + } + + fn visit_f32x4_mul(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_mul, simd::f32x4_mul) + } + + fn visit_f32x4_div(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_div, simd::f32x4_div) + } + + fn visit_f32x4_min(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_min, simd::f32x4_min) + } + + fn visit_f32x4_max(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_max, simd::f32x4_max) + } + + fn visit_f32x4_pmin(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_pmin, simd::f32x4_pmin) + } + + fn visit_f32x4_pmax(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f32x4_pmax, simd::f32x4_pmax) + } + + fn visit_f64x2_ceil(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_ceil, simd::f64x2_ceil) + } + + fn visit_f64x2_floor(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_floor, simd::f64x2_floor) + } + + fn visit_f64x2_trunc(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_trunc, simd::f64x2_trunc) + } + + fn visit_f64x2_nearest(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_nearest, simd::f64x2_nearest) + } + + fn visit_f64x2_abs(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_abs, simd::f64x2_abs) + } + + fn visit_f64x2_neg(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_neg, simd::f64x2_neg) + } + + fn visit_f64x2_sqrt(&mut self) -> Self::Output { + self.translate_simd_unary(Instruction::f64x2_sqrt, simd::f64x2_sqrt) + } + + fn visit_f64x2_add(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_add, simd::f64x2_add) + } + + fn visit_f64x2_sub(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_sub, simd::f64x2_sub) + } + + fn visit_f64x2_mul(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_mul, simd::f64x2_mul) + } + + fn visit_f64x2_div(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_div, simd::f64x2_div) + } + + fn visit_f64x2_min(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_min, simd::f64x2_min) + } + + fn visit_f64x2_max(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_max, simd::f64x2_max) + } + + 
fn visit_f64x2_pmin(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_pmin, simd::f64x2_pmin) + } + + fn visit_f64x2_pmax(&mut self) -> Self::Output { + self.translate_simd_binary(Instruction::f64x2_pmax, simd::f64x2_pmax) + } + + fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_trunc_sat_f32x4_s, + simd::i32x4_trunc_sat_f32x4_s, + ) + } + + fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_trunc_sat_f32x4_u, + simd::i32x4_trunc_sat_f32x4_u, + ) + } + + fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f32x4_convert_i32x4_s, + simd::f32x4_convert_i32x4_s, + ) + } + + fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f32x4_convert_i32x4_u, + simd::f32x4_convert_i32x4_u, + ) + } + + fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_trunc_sat_f64x2_s_zero, + simd::i32x4_trunc_sat_f64x2_s_zero, + ) + } + + fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::i32x4_trunc_sat_f64x2_u_zero, + simd::i32x4_trunc_sat_f64x2_u_zero, + ) + } + + fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f64x2_convert_low_i32x4_s, + simd::f64x2_convert_low_i32x4_s, + ) + } + + fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f64x2_convert_low_i32x4_u, + simd::f64x2_convert_low_i32x4_u, + ) + } + + fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f32x4_demote_f64x2_zero, + simd::f32x4_demote_f64x2_zero, + ) + } + + fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output { + self.translate_simd_unary( + Instruction::f64x2_promote_low_f32x4, + simd::f64x2_promote_low_f32x4, + ) + } + + fn visit_i8x16_relaxed_swizzle(&mut self) -> Self::Output { + self.visit_i8x16_swizzle() + } + + fn visit_i32x4_relaxed_trunc_f32x4_s(&mut self) -> Self::Output { + self.visit_i32x4_trunc_sat_f32x4_s() + } + + fn visit_i32x4_relaxed_trunc_f32x4_u(&mut self) -> Self::Output { + self.visit_i32x4_trunc_sat_f32x4_u() + } + + fn visit_i32x4_relaxed_trunc_f64x2_s_zero(&mut self) -> Self::Output { + self.visit_i32x4_trunc_sat_f64x2_s_zero() + } + + fn visit_i32x4_relaxed_trunc_f64x2_u_zero(&mut self) -> Self::Output { + self.visit_i32x4_trunc_sat_f64x2_u_zero() + } + + fn visit_f32x4_relaxed_madd(&mut self) -> Self::Output { + self.translate_simd_ternary(Instruction::f32x4_relaxed_madd, simd::f32x4_relaxed_madd) + } + + fn visit_f32x4_relaxed_nmadd(&mut self) -> Self::Output { + self.translate_simd_ternary(Instruction::f32x4_relaxed_nmadd, simd::f32x4_relaxed_nmadd) + } + + fn visit_f64x2_relaxed_madd(&mut self) -> Self::Output { + self.translate_simd_ternary(Instruction::f64x2_relaxed_madd, simd::f64x2_relaxed_madd) + } + + fn visit_f64x2_relaxed_nmadd(&mut self) -> Self::Output { + self.translate_simd_ternary(Instruction::f64x2_relaxed_nmadd, simd::f64x2_relaxed_nmadd) + } + + fn visit_i8x16_relaxed_laneselect(&mut self) -> Self::Output { + self.visit_v128_bitselect() + } + + fn visit_i16x8_relaxed_laneselect(&mut self) -> Self::Output { + self.visit_v128_bitselect() + } + + fn visit_i32x4_relaxed_laneselect(&mut self) -> Self::Output { + self.visit_v128_bitselect() + } + + fn 
visit_i64x2_relaxed_laneselect(&mut self) -> Self::Output { + self.visit_v128_bitselect() + } + + fn visit_f32x4_relaxed_min(&mut self) -> Self::Output { + self.visit_f32x4_min() + } + + fn visit_f32x4_relaxed_max(&mut self) -> Self::Output { + self.visit_f32x4_max() + } + + fn visit_f64x2_relaxed_min(&mut self) -> Self::Output { + self.visit_f64x2_min() + } + + fn visit_f64x2_relaxed_max(&mut self) -> Self::Output { + self.visit_f64x2_max() + } + + fn visit_i16x8_relaxed_q15mulr_s(&mut self) -> Self::Output { + self.visit_i16x8_q15mulr_sat_s() + } + + fn visit_i16x8_relaxed_dot_i8x16_i7x16_s(&mut self) -> Self::Output { + self.translate_simd_binary( + Instruction::i16x8_relaxed_dot_i8x16_i7x16_s, + simd::i16x8_relaxed_dot_i8x16_i7x16_s, + ) + } + + fn visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(&mut self) -> Self::Output { + self.translate_simd_ternary( + Instruction::i32x4_relaxed_dot_i8x16_i7x16_add_s, + simd::i32x4_relaxed_dot_i8x16_i7x16_add_s, + ) + } +} diff --git a/crates/wasmi/src/engine/translator/func2/stack/control.rs b/crates/wasmi/src/engine/translator/func2/stack/control.rs new file mode 100644 index 0000000000..6a1db3082f --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/stack/control.rs @@ -0,0 +1,800 @@ +use super::{Operand, Reset}; +use crate::{ + engine::{ + translator::{labels::LabelRef, utils::Instr}, + BlockType, + }, + Engine, +}; +use alloc::vec::{Drain, Vec}; + +#[cfg(doc)] +use crate::ir::Instruction; + +/// The height of the operand stack upon entering a [`ControlFrame`]. +#[derive(Debug, Copy, Clone)] +pub struct StackHeight(u16); + +impl From for usize { + fn from(height: StackHeight) -> Self { + usize::from(height.0) + } +} + +impl From for StackHeight { + fn from(height: usize) -> Self { + let Ok(height) = u16::try_from(height) else { + panic!("out of bounds stack height: {height}") + }; + Self(height) + } +} + +/// The Wasm control stack. +#[derive(Debug, Default)] +pub struct ControlStack { + /// The stack of control frames. + frames: Vec, + /// Special operand stack to memorize operands for `else` control frames. + else_operands: ElseOperands, + /// This is `true` if an `if` with else providers was just popped from the stack. + /// + /// # Note + /// + /// This means that its associated `else` operands need to be taken care of by + /// either pushing back an `else` control frame or by manually popping them off + /// the control stack. + orphaned_else_operands: bool, +} + +/// Duplicated operands for Wasm `else` control frames. +#[derive(Debug, Default)] +pub struct ElseOperands { + /// The end indices of each `else` operands. + ends: Vec, + /// All operands of all allocated `else` control frames. + operands: Vec, +} + +impl Reset for ElseOperands { + fn reset(&mut self) { + self.ends.clear(); + self.operands.clear(); + } +} + +impl ElseOperands { + /// Pushes operands for a new Wasm `else` control frame. + pub fn push(&mut self, operands: impl IntoIterator) { + self.operands.extend(operands); + let end = self.operands.len(); + self.ends.push(end); + } + + /// Pops the top-most Wasm `else` operands from `self` and returns them. + pub fn pop(&mut self) -> Option> { + let end = self.ends.pop()?; + let start = self.ends.last().copied().unwrap_or(0); + Some(self.operands.drain(start..end)) + } +} + +impl Reset for ControlStack { + fn reset(&mut self) { + self.frames.clear(); + self.else_operands.reset(); + self.orphaned_else_operands = false; + } +} + +impl ControlStack { + /// Returns `true` if `self` is empty. 
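+    // Note on `else_operands`: `ElseOperands` stores all memorized operands in one
+    // flat buffer and only records the end index of each group. For example, with
+    // placeholder operands `a`, `b`, `c`: pushing the groups `[a, b]` and then `[c]`
+    // yields `operands = [a, b, c]` and `ends = [2, 3]`; `pop` then drains
+    // `operands[2..3]` (= `[c]`) first and `operands[0..2]` (= `[a, b]`) afterwards.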
+ pub fn is_empty(&self) -> bool { + self.height() == 0 + } + + /// Returns the height of the [`ControlStack`]. + pub fn height(&self) -> usize { + self.frames.len() + } + + /// Pushes a new unreachable Wasm control frame onto the [`ControlStack`]. + pub fn push_unreachable(&mut self, kind: ControlFrameKind) { + debug_assert!(!self.orphaned_else_operands); + self.frames.push(ControlFrame::from(kind)) + } + + /// Pushes a new Wasm `block` onto the [`ControlStack`]. + pub fn push_block( + &mut self, + ty: BlockType, + height: usize, + label: LabelRef, + consume_fuel: Option, + ) { + debug_assert!(!self.orphaned_else_operands); + self.frames.push(ControlFrame::from(BlockControlFrame { + ty, + height: StackHeight::from(height), + is_branched_to: false, + consume_fuel, + label, + })) + } + + /// Pushes a new Wasm `loop` onto the [`ControlStack`]. + pub fn push_loop( + &mut self, + ty: BlockType, + height: usize, + label: LabelRef, + consume_fuel: Option, + ) { + debug_assert!(!self.orphaned_else_operands); + self.frames.push(ControlFrame::from(LoopControlFrame { + ty, + height: StackHeight::from(height), + is_branched_to: false, + consume_fuel, + label, + })) + } + + /// Pushes a new Wasm `if` onto the [`ControlStack`]. + pub fn push_if( + &mut self, + ty: BlockType, + height: usize, + label: LabelRef, + consume_fuel: Option, + reachability: IfReachability, + else_operands: impl IntoIterator, + ) { + debug_assert!(!self.orphaned_else_operands); + self.frames.push(ControlFrame::from(IfControlFrame { + ty, + height: StackHeight::from(height), + is_branched_to: false, + consume_fuel, + label, + reachability, + })); + if matches!(reachability, IfReachability::Both { .. }) { + self.else_operands.push(else_operands); + } + } + + /// Pushes a new Wasm `else` onto the [`ControlStack`]. + /// + /// Returns iterator yielding the memorized `else` operands. + pub fn push_else( + &mut self, + if_frame: IfControlFrame, + consume_fuel: Option, + is_end_of_then_reachable: bool, + ) { + debug_assert!(!self.orphaned_else_operands); + let ty = if_frame.ty(); + let height = if_frame.height(); + let label = if_frame.label(); + let is_branched_to = if_frame.is_branched_to(); + let reachability = match if_frame.reachability { + IfReachability::Both { .. } => ElseReachability::Both { + is_end_of_then_reachable, + }, + IfReachability::OnlyThen => ElseReachability::OnlyThen { + is_end_of_then_reachable, + }, + IfReachability::OnlyElse => ElseReachability::OnlyElse, + }; + self.frames.push(ControlFrame::from(ElseControlFrame { + ty, + height: StackHeight::from(height), + is_branched_to, + consume_fuel, + label, + reachability, + })); + } + + /// Pops the top-most [`ControlFrame`] and returns it if any. + pub fn pop(&mut self) -> Option { + debug_assert!(!self.orphaned_else_operands); + let frame = self.frames.pop()?; + self.orphaned_else_operands = match &frame { + ControlFrame::If(frame) => { + matches!(frame.reachability, IfReachability::Both { .. }) + } + _ => false, + }; + Some(frame) + } + + /// Pops the top-most `else` operands from the control stack. + /// + /// # Panics (Debug) + /// + /// If the `else` operands are not in orphaned state. + pub fn pop_else_operands(&mut self) -> Drain<'_, Operand> { + debug_assert!(self.orphaned_else_operands); + let Some(else_operands) = self.else_operands.pop() else { + panic!("missing `else` operands") + }; + self.orphaned_else_operands = false; + else_operands + } + + /// Returns a shared reference to the [`ControlFrame`] at `depth` if any. 
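+    ///
+    /// Here `depth` is relative to the top of the control stack: `get(0)` returns
+    /// the innermost (most recently pushed) frame, mirroring the relative label
+    /// depth used by Wasm branch instructions such as `br 0`.
+    ///
+    /// # Panics
+    ///
+    /// If `depth` is out of bounds for the control stack.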
+ pub fn get(&self, depth: usize) -> &ControlFrame { + let height = self.height(); + self.frames.iter().rev().nth(depth).unwrap_or_else(|| { + panic!( + "out of bounds control frame at depth (={depth}) for stack of height (={height})" + ) + }) + } + + /// Returns an exclusive reference to the [`ControlFrame`] at `depth` if any. + pub fn get_mut(&mut self, depth: usize) -> ControlFrameMut<'_> { + let height = self.height(); + self.frames + .iter_mut() + .rev() + .nth(depth) + .map(ControlFrameMut) + .unwrap_or_else(|| { + panic!( + "out of bounds control frame at depth (={depth}) for stack of height (={height})" + ) + }) + } +} + +/// An exclusive reference to a [`ControlFrame`]. +#[derive(Debug)] +pub struct ControlFrameMut<'a>(&'a mut ControlFrame); + +impl<'a> ControlFrameBase for ControlFrameMut<'a> { + fn ty(&self) -> BlockType { + self.0.ty() + } + + fn height(&self) -> usize { + self.0.height() + } + + fn label(&self) -> LabelRef { + self.0.label() + } + + fn is_branched_to(&self) -> bool { + self.0.is_branched_to() + } + + fn branch_to(&mut self) { + self.0.branch_to() + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + self.0.len_branch_params(engine) + } + + fn consume_fuel_instr(&self) -> Option { + self.0.consume_fuel_instr() + } +} + +/// An acquired branch target. +#[derive(Debug)] +pub enum AcquiredTarget<'stack> { + /// The branch targets the function enclosing `block` and therefore is a `return`. + Return(ControlFrameMut<'stack>), + /// The branch targets a regular [`ControlFrame`]. + Branch(ControlFrameMut<'stack>), +} + +impl<'stack> AcquiredTarget<'stack> { + /// Returns an exclusive reference to the [`ControlFrame`] of the [`AcquiredTarget`]. + pub fn control_frame(self) -> ControlFrameMut<'stack> { + match self { + Self::Return(frame) => frame, + Self::Branch(frame) => frame, + } + } +} + +impl ControlStack { + /// Acquires the target [`ControlFrame`] at the given relative `depth`. + pub fn acquire_target(&mut self, depth: usize) -> AcquiredTarget<'_> { + let is_root = self.is_root(depth); + let frame = self.get_mut(depth); + if is_root { + AcquiredTarget::Return(frame) + } else { + AcquiredTarget::Branch(frame) + } + } + + /// Returns `true` if `depth` points to the first control flow frame. + fn is_root(&self, depth: usize) -> bool { + if self.frames.is_empty() { + return false; + } + depth == self.height() - 1 + } +} + +/// A Wasm control frame. +#[derive(Debug)] +pub enum ControlFrame { + /// A Wasm `block` control frame. + Block(BlockControlFrame), + /// A Wasm `loop` control frame. + Loop(LoopControlFrame), + /// A Wasm `if` control frame. + If(IfControlFrame), + /// A Wasm `else` control frame. + Else(ElseControlFrame), + /// A generic unreachable control frame. + Unreachable(ControlFrameKind), +} + +impl From for ControlFrame { + fn from(frame: BlockControlFrame) -> Self { + Self::Block(frame) + } +} + +impl From for ControlFrame { + fn from(frame: LoopControlFrame) -> Self { + Self::Loop(frame) + } +} + +impl From for ControlFrame { + fn from(frame: IfControlFrame) -> Self { + Self::If(frame) + } +} + +impl From for ControlFrame { + fn from(frame: ElseControlFrame) -> Self { + Self::Else(frame) + } +} + +impl From for ControlFrame { + fn from(frame: ControlFrameKind) -> Self { + Self::Unreachable(frame) + } +} + +/// Trait implemented by control frame types that share a common API. +pub trait ControlFrameBase { + /// Returns the [`BlockType`] of the [`BlockControlFrame`]. 
+ fn ty(&self) -> BlockType; + + /// Returns the height of the [`BlockControlFrame`]. + fn height(&self) -> usize; + + /// Returns the branch label of `self`. + fn label(&self) -> LabelRef; + + /// Returns `true` if there exists a branch to `self.` + fn is_branched_to(&self) -> bool; + + /// Makes `self` aware that there is a branch to it. + fn branch_to(&mut self); + + /// Returns the number of operands required for branching to `self`. + fn len_branch_params(&self, engine: &Engine) -> u16; + + /// Returns a reference to the [`Instruction::ConsumeFuel`] of `self`. + /// + /// Returns `None` if fuel metering is disabled. + fn consume_fuel_instr(&self) -> Option; +} + +impl ControlFrameBase for ControlFrame { + fn ty(&self) -> BlockType { + match self { + ControlFrame::Block(frame) => frame.ty(), + ControlFrame::Loop(frame) => frame.ty(), + ControlFrame::If(frame) => frame.ty(), + ControlFrame::Else(frame) => frame.ty(), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrameBase::ty`") + } + } + } + + fn height(&self) -> usize { + match self { + ControlFrame::Block(frame) => frame.height(), + ControlFrame::Loop(frame) => frame.height(), + ControlFrame::If(frame) => frame.height(), + ControlFrame::Else(frame) => frame.height(), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrameBase::height`") + } + } + } + + fn label(&self) -> LabelRef { + match self { + ControlFrame::Block(frame) => frame.label(), + ControlFrame::Loop(frame) => frame.label(), + ControlFrame::If(frame) => frame.label(), + ControlFrame::Else(frame) => frame.label(), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrame::label`") + } + } + } + + fn is_branched_to(&self) -> bool { + match self { + ControlFrame::Block(frame) => frame.is_branched_to(), + ControlFrame::Loop(frame) => frame.is_branched_to(), + ControlFrame::If(frame) => frame.is_branched_to(), + ControlFrame::Else(frame) => frame.is_branched_to(), + ControlFrame::Unreachable(_) => { + panic!( + "invalid query for unreachable control frame: `ControlFrame::is_branched_to`" + ) + } + } + } + + fn branch_to(&mut self) { + match self { + ControlFrame::Block(frame) => frame.branch_to(), + ControlFrame::Loop(frame) => frame.branch_to(), + ControlFrame::If(frame) => frame.branch_to(), + ControlFrame::Else(frame) => frame.branch_to(), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrame::branch_to`") + } + } + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + match self { + ControlFrame::Block(frame) => frame.len_branch_params(engine), + ControlFrame::Loop(frame) => frame.len_branch_params(engine), + ControlFrame::If(frame) => frame.len_branch_params(engine), + ControlFrame::Else(frame) => frame.len_branch_params(engine), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrame::len_branch_params`") + } + } + } + + fn consume_fuel_instr(&self) -> Option { + match self { + ControlFrame::Block(frame) => frame.consume_fuel_instr(), + ControlFrame::Loop(frame) => frame.consume_fuel_instr(), + ControlFrame::If(frame) => frame.consume_fuel_instr(), + ControlFrame::Else(frame) => frame.consume_fuel_instr(), + ControlFrame::Unreachable(_) => { + panic!("invalid query for unreachable control frame: `ControlFrame::consume_fuel_instr`") + } + } + } +} + +/// A Wasm `block` control frame. 
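+///
+/// Branches that target a `block` jump to its `end`, so the branch parameters are
+/// the block's results; a `loop`, in contrast, is branched to at its header and
+/// uses the block type's parameters (compare the `len_branch_params` impls below).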
+#[derive(Debug)] +pub struct BlockControlFrame { + /// The block type of the [`BlockControlFrame`]. + ty: BlockType, + /// The value stack height upon entering the [`BlockControlFrame`]. + height: StackHeight, + /// This is `true` if there is at least one branch to this [`BlockControlFrame`]. + is_branched_to: bool, + /// The [`BlockControlFrame`]'s [`Instruction::ConsumeFuel`] if fuel metering is enabled. + /// + /// # Note + /// + /// This is `Some` if fuel metering is enabled and `None` otherwise. + consume_fuel: Option, + /// The label used to branch to the [`BlockControlFrame`]. + label: LabelRef, +} + +impl ControlFrameBase for BlockControlFrame { + fn ty(&self) -> BlockType { + self.ty + } + + fn height(&self) -> usize { + self.height.into() + } + + fn label(&self) -> LabelRef { + self.label + } + + fn is_branched_to(&self) -> bool { + self.is_branched_to + } + + fn branch_to(&mut self) { + self.is_branched_to = true; + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + self.ty.len_results(engine) + } + + fn consume_fuel_instr(&self) -> Option { + self.consume_fuel + } +} + +/// A Wasm `loop` control frame. +#[derive(Debug)] +pub struct LoopControlFrame { + /// The block type of the [`LoopControlFrame`]. + ty: BlockType, + /// The value stack height upon entering the [`LoopControlFrame`]. + height: StackHeight, + /// This is `true` if there is at least one branch to this [`LoopControlFrame`]. + is_branched_to: bool, + /// The [`LoopControlFrame`]'s [`Instruction::ConsumeFuel`] if fuel metering is enabled. + /// + /// # Note + /// + /// This is `Some` if fuel metering is enabled and `None` otherwise. + consume_fuel: Option, + /// The label used to branch to the [`LoopControlFrame`]. + label: LabelRef, +} + +impl ControlFrameBase for LoopControlFrame { + fn ty(&self) -> BlockType { + self.ty + } + + fn height(&self) -> usize { + self.height.into() + } + + fn label(&self) -> LabelRef { + self.label + } + + fn is_branched_to(&self) -> bool { + self.is_branched_to + } + + fn branch_to(&mut self) { + self.is_branched_to = true; + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + self.ty.len_params(engine) + } + + fn consume_fuel_instr(&self) -> Option { + self.consume_fuel + } +} + +/// A Wasm `if` control frame including its `then` part. +#[derive(Debug)] +pub struct IfControlFrame { + /// The block type of the [`IfControlFrame`]. + ty: BlockType, + /// The value stack height upon entering the [`IfControlFrame`]. + height: StackHeight, + /// This is `true` if there is at least one branch to this [`IfControlFrame`]. + is_branched_to: bool, + /// The [`IfControlFrame`]'s [`Instruction::ConsumeFuel`] if fuel metering is enabled. + /// + /// # Note + /// + /// This is `Some` if fuel metering is enabled and `None` otherwise. + consume_fuel: Option, + /// The label used to branch to the [`IfControlFrame`]. + label: LabelRef, + /// The reachability of the `then` and `else` blocks. + reachability: IfReachability, +} + +impl IfControlFrame { + /// Returns the [`IfReachability`] of the [`IfControlFrame`]. + pub fn reachability(&self) -> IfReachability { + self.reachability + } + + /// Returns `true` if the `else` branch is reachable. + /// + /// # Note + /// + /// The `else` branch is unreachable if the `if` condition is a constant `true` value. + pub fn is_else_reachable(&self) -> bool { + match self.reachability { + IfReachability::Both { .. 
} | IfReachability::OnlyElse => true, + IfReachability::OnlyThen => false, + } + } +} + +impl ControlFrameBase for IfControlFrame { + fn ty(&self) -> BlockType { + self.ty + } + + fn height(&self) -> usize { + self.height.into() + } + + fn label(&self) -> LabelRef { + self.label + } + + fn is_branched_to(&self) -> bool { + self.is_branched_to + } + + fn branch_to(&mut self) { + self.is_branched_to = true; + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + self.ty.len_results(engine) + } + + fn consume_fuel_instr(&self) -> Option { + self.consume_fuel + } +} + +/// The reachability of the `if` control flow frame. +#[derive(Debug, Copy, Clone)] +pub enum IfReachability { + /// Both, `then` and `else` blocks of the `if` are reachable. + /// + /// # Note + /// + /// This variant does not mean that necessarily both `then` and `else` + /// blocks do exist and are non-empty. The `then` block might still be + /// empty and the `then` block might still be missing. + Both { else_label: LabelRef }, + /// Only the `then` block of the `if` is reachable. + /// + /// # Note + /// + /// This case happens only in case the `if` has a `true` constant condition. + OnlyThen, + /// Only the `else` block of the `if` is reachable. + /// + /// # Note + /// + /// This case happens only in case the `if` has a `false` constant condition. + OnlyElse, +} + +/// A Wasm `else` control frame part of Wasm `if`. +#[derive(Debug)] +pub struct ElseControlFrame { + /// The block type of the [`ElseControlFrame`]. + ty: BlockType, + /// The value stack height upon entering the [`ElseControlFrame`]. + height: StackHeight, + /// This is `true` if there is at least one branch to this [`ElseControlFrame`]. + is_branched_to: bool, + /// The [`LoopControlFrame`]'s [`Instruction::ConsumeFuel`] if fuel metering is enabled. + /// + /// # Note + /// + /// This is `Some` if fuel metering is enabled and `None` otherwise. + consume_fuel: Option, + /// The label used to branch to the [`ElseControlFrame`]. + label: LabelRef, + /// The reachability of the `then` and `else` blocks. + reachability: ElseReachability, +} + +/// The reachability of the `else` control flow frame. +#[derive(Debug, Copy, Clone)] +pub enum ElseReachability { + /// Both, `then` and `else` blocks of the `if` are reachable. + /// + /// # Note + /// + /// This variant does not mean that necessarily both `then` and `else` + /// blocks do exist and are non-empty. The `then` block might still be + /// empty and the `then` block might still be missing. + Both { + /// Is `true` if code is reachable when entering the `else` block. + /// + /// # Note + /// + /// This means that the end of the `then` block was reachable. + is_end_of_then_reachable: bool, + }, + /// Only the `then` block of the `if` is reachable. + /// + /// # Note + /// + /// This case happens only in case the `if` has a `true` constant condition. + OnlyThen { + /// Is `true` if code is reachable when entering the `else` block. + /// + /// # Note + /// + /// This means that the end of the `then` block was reachable. + is_end_of_then_reachable: bool, + }, + /// Only the `else` block of the `if` is reachable. + /// + /// # Note + /// + /// This case happens only in case the `if` has a `false` constant condition. + OnlyElse, +} + +impl ElseControlFrame { + /// Returns the [`ElseReachability`] of the [`ElseReachability`]. + pub fn reachability(&self) -> ElseReachability { + self.reachability + } + + /// Returns `true` if the end of the `then` branch is reachable. 
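A small sketch, using simplified hypothetical types, of how the reachability above is decided from the `if` condition during translation: a constant condition makes exactly one arm reachable, while a runtime condition keeps both arms and requires a fresh `else` label as the target of the branch that skips the `then` arm.

```rust
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Label(u32);

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum IfReachability {
    Both { else_label: Label },
    OnlyThen,
    OnlyElse,
}

/// Decides reachability from an optional constant `i32` condition.
///
/// `new_label` stands in for allocating a fresh branch label.
fn if_reachability(const_condition: Option<i32>, new_label: impl FnOnce() -> Label) -> IfReachability {
    match const_condition {
        // Constant `false`: only the `else` arm can ever execute.
        Some(0) => IfReachability::OnlyElse,
        // Constant `true` (any non-zero value): only the `then` arm can execute.
        Some(_) => IfReachability::OnlyThen,
        // Runtime condition: both arms are reachable and the conditional branch
        // that skips the `then` arm needs an `else` label to jump to.
        None => IfReachability::Both { else_label: new_label() },
    }
}

fn main() {
    assert_eq!(if_reachability(Some(0), || Label(0)), IfReachability::OnlyElse);
    assert_eq!(if_reachability(Some(1), || Label(0)), IfReachability::OnlyThen);
    assert_eq!(
        if_reachability(None, || Label(7)),
        IfReachability::Both { else_label: Label(7) },
    );
}
```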
+ pub fn is_end_of_then_reachable(&self) -> bool { + match self.reachability { + ElseReachability::Both { + is_end_of_then_reachable, + } + | ElseReachability::OnlyThen { + is_end_of_then_reachable, + } => is_end_of_then_reachable, + ElseReachability::OnlyElse => false, + } + } +} + +impl ControlFrameBase for ElseControlFrame { + fn ty(&self) -> BlockType { + self.ty + } + + fn height(&self) -> usize { + self.height.into() + } + + fn label(&self) -> LabelRef { + self.label + } + + fn is_branched_to(&self) -> bool { + self.is_branched_to + } + + fn branch_to(&mut self) { + self.is_branched_to = true; + } + + fn len_branch_params(&self, engine: &Engine) -> u16 { + self.ty.len_results(engine) + } + + fn consume_fuel_instr(&self) -> Option { + self.consume_fuel + } +} + +/// The kind of a Wasm control frame. +#[derive(Debug, Copy, Clone)] +pub enum ControlFrameKind { + /// An Wasm `block` control frame. + Block, + /// An Wasm `loop` control frame. + Loop, + /// An Wasm `if` control frame. + If, + /// An Wasm `else` control frame. + Else, +} diff --git a/crates/wasmi/src/engine/translator/func2/stack/locals.rs b/crates/wasmi/src/engine/translator/func2/stack/locals.rs new file mode 100644 index 0000000000..b38211c38c --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/stack/locals.rs @@ -0,0 +1,218 @@ +use super::{OperandIdx, Reset}; +use crate::{core::ValType, engine::TranslationError, Error}; +use alloc::vec::Vec; +use core::{cmp, iter}; + +/// A local variable index. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub struct LocalIdx(u32); + +impl From for LocalIdx { + fn from(index: u32) -> Self { + Self(index) + } +} + +impl From for u32 { + fn from(index: LocalIdx) -> Self { + index.0 + } +} + +/// Stores definitions of locals. +#[derive(Debug, Default, Clone)] +pub struct LocalsRegistry { + /// The types of the first defined local variables. + tys_first: Vec, + /// The types of the remaining defined local variables. + tys_remaining: Vec, + /// The first operand for the local on the stack if any. + first_operands: Vec>, +} + +impl Reset for LocalsRegistry { + fn reset(&mut self) { + self.tys_first.clear(); + self.tys_remaining.clear(); + self.first_operands.clear(); + } +} + +impl LocalsRegistry { + /// Returns the number of registered local variables in `self`. + pub fn len(&self) -> usize { + self.first_operands.len() + } + + /// The maximum number of local variables per function. + const LOCAL_VARIABLES_MAX: usize = 30_000; + + /// The maximum number of local variables in the fast `tys_first` vector. + const FIRST_TYS_MAX: usize = 100; + + /// Registers `amount` of locals of type `ty` for `self`. + /// + /// # Errors + /// + /// If too many locals are registered. 
+ pub fn register(&mut self, amount: u32, ty: ValType) -> Result<(), Error> { + if amount == 0 { + return Ok(()); + } + let Ok(amount) = usize::try_from(amount) else { + panic!( + "failed to register {amount} local variables of type {ty:?}: out of bounds `usize`" + ) + }; + if self.len().saturating_add(amount) > Self::LOCAL_VARIABLES_MAX { + return Err(Error::from(TranslationError::TooManyFunctionParams)); + } + let vacant_first = Self::FIRST_TYS_MAX.saturating_sub(self.tys_first.len()); + let push_to_first = cmp::min(vacant_first, amount); + self.tys_first.extend(iter::repeat_n(ty, push_to_first)); + let remaining_amount = amount - push_to_first; + let remaining_index = (self.len() + amount - 1) as u32; + if remaining_amount > 0 { + self.tys_remaining + .push(LocalGroup::new(remaining_index, ty)); + } + self.first_operands.extend(iter::repeat_n(None, amount)); + Ok(()) + } + + /// Converts `index` into a `usize` value. + fn local_idx_to_index(index: LocalIdx) -> usize { + let index = u32::from(index); + let Ok(index) = usize::try_from(index) else { + panic!("out of bounds `LocalIdx`: {index}") + }; + index + } + + /// Replaces the first operand for this local on the stack and returns the old one. + /// + /// # Panics + /// + /// If `index` is out of bounds. + pub fn replace_first_operand( + &mut self, + index: LocalIdx, + first_operand: Option, + ) -> Option { + let index = Self::local_idx_to_index(index); + let cell = &mut self.first_operands[index]; + match first_operand { + Some(first_operand) => cell.replace(first_operand), + None => cell.take(), + } + } + + /// Returns the type of the local variable at `index` if any. + /// + /// # Panics + /// + /// If `index` is out of bounds and does not refer to a local in `self`. + pub fn ty(&self, index: LocalIdx) -> ValType { + let index_sz = Self::local_idx_to_index(index); + match self.tys_first.get(index_sz) { + Some(ty) => *ty, + None => self + .ty_slow(index) + .unwrap_or_else(|| panic!("out of bounds local index: {index:?}")), + } + } + + /// Returns the type of the local variable at `index` if any. + /// + /// This is the slow-path for local variables that have been stored in the `remaining` buffer. + #[cold] + fn ty_slow(&self, index: LocalIdx) -> Option { + if self.tys_remaining.is_empty() { + return None; + } + match self + .tys_remaining + .binary_search_by_key(&index.0, LocalGroup::max_index) + { + Err(i) if i == self.tys_remaining.len() => None, + Ok(i) | Err(i) => Some(self.tys_remaining[i].ty()), + } + } +} + +/// A local group of one or more locals sharing a common type. +#[derive(Debug, Copy, Clone)] +struct LocalGroup { + /// The local index of the first local in the group. + max_index: u32, + /// The shared type of the locals in the local group. + ty: ValType, +} + +impl LocalGroup { + /// Creates a new [`LocalGroup`]. + fn new(max_index: u32, ty: ValType) -> Self { + Self { max_index, ty } + } + + /// Returns the maximum index of the local variables in the [`LocalGroup`]. + fn max_index(&self) -> u32 { + self.max_index + } + + /// Returns the [`ValType`] of the [`LocalGroup`]. 
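The two-tier lookup used by `LocalsRegistry` above can be sketched in isolation (constants and names are simplified here, this is not the actual wasmi implementation): the first few local types are stored densely for O(1) access, while later bulk declarations are compressed into groups keyed by their maximum index and found via binary search.

```rust
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum Ty { I32, I64, F32 }

/// A run of locals sharing one type; `max_index` is the index of its last local.
#[derive(Debug, Copy, Clone)]
struct Group { max_index: u32, ty: Ty }

#[derive(Default)]
struct Locals {
    /// Types of the first `DENSE_MAX` locals, one entry per local.
    dense: Vec<Ty>,
    /// Remaining locals, compressed into groups sorted by `max_index`.
    groups: Vec<Group>,
    /// Total number of registered locals.
    len: u32,
}

impl Locals {
    /// Kept tiny for the sketch; the real registry uses a larger threshold.
    const DENSE_MAX: usize = 4;

    fn register(&mut self, amount: u32, ty: Ty) {
        if amount == 0 { return; }
        let vacant = Self::DENSE_MAX.saturating_sub(self.dense.len()) as u32;
        let to_dense = vacant.min(amount);
        self.dense.extend(std::iter::repeat(ty).take(to_dense as usize));
        self.len += amount;
        if amount > to_dense {
            // The group covers all remaining new locals up to the new maximum index.
            self.groups.push(Group { max_index: self.len - 1, ty });
        }
    }

    fn ty(&self, index: u32) -> Ty {
        if let Some(ty) = self.dense.get(index as usize) {
            return *ty;
        }
        // Find the first group whose `max_index` is >= `index`.
        match self.groups.binary_search_by_key(&index, |g| g.max_index) {
            Ok(i) => self.groups[i].ty,
            Err(i) => self.groups[i].ty, // panics if `index` is out of bounds
        }
    }
}

fn main() {
    let mut locals = Locals::default();
    locals.register(2, Ty::I32);  // indices 0..2: dense
    locals.register(10, Ty::I64); // indices 2..4 dense, 4..12 grouped
    locals.register(5, Ty::F32);  // indices 12..17 grouped
    assert_eq!(locals.ty(1), Ty::I32);
    assert_eq!(locals.ty(3), Ty::I64);
    assert_eq!(locals.ty(11), Ty::I64);
    assert_eq!(locals.ty(16), Ty::F32);
}
```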
+ fn ty(&self) -> ValType { + self.ty + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn ty_works() { + let mut locals = LocalsRegistry::default(); + for locals_per_type in [1, 2, 10, 100] { + locals.reset(); + let tys = [ValType::I32, ValType::I64, ValType::F32, ValType::F64]; + for ty in tys { + locals.register(locals_per_type, ty).unwrap(); + } + let locals_per_type = locals_per_type as usize; + assert_eq!(locals.len(), locals_per_type * tys.len()); + for i in 0..locals.len() { + assert_eq!(locals.ty(LocalIdx(i as u32)), tys[i / locals_per_type]); + } + } + } + + #[test] + fn locals_followed_by_groups() { + let mut locals = LocalsRegistry::default(); + let len_single = [1, 10, 100]; + let len_groups = [1, 10, 100]; + let locals_per_group = [10, 100]; + for len_single in len_single { + for len_groups in len_groups { + for locals_per_group in locals_per_group { + locals.reset(); + let len_locals = len_single + (len_groups * locals_per_group); + for _ in 0..len_single { + locals.register(1, ValType::I32).unwrap(); + } + for _ in 0..len_groups { + locals.register(locals_per_group, ValType::I64).unwrap(); + } + for i in 0..len_locals { + let ty = match i < len_single { + true => ValType::I32, + false => ValType::I64, + }; + assert_eq!(locals.ty(LocalIdx(i)), ty); + } + } + } + } + } +} diff --git a/crates/wasmi/src/engine/translator/func2/stack/mod.rs b/crates/wasmi/src/engine/translator/func2/stack/mod.rs new file mode 100644 index 0000000000..574b78b815 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/stack/mod.rs @@ -0,0 +1,516 @@ +mod control; +mod locals; +mod operand; +mod operands; + +use self::{ + control::ControlStack, + locals::LocalsRegistry, + operands::{OperandStack, StackOperand}, +}; +pub use self::{ + control::{ + AcquiredTarget, + BlockControlFrame, + ControlFrame, + ControlFrameBase, + ControlFrameKind, + ElseControlFrame, + ElseReachability, + IfControlFrame, + IfReachability, + LoopControlFrame, + }, + locals::LocalIdx, + operand::{ImmediateOperand, Operand, TempOperand}, + operands::{OperandIdx, PreservedLocalsIter}, +}; +use super::{Reset, ReusableAllocations}; +use crate::{ + core::{TypedVal, ValType}, + engine::{ + translator::{labels::LabelRef, utils::Instr}, + BlockType, + }, + Engine, + Error, +}; +use alloc::vec::Vec; + +#[cfg(doc)] +use crate::ir::Instruction; + +/// The Wasm value stack during translation from Wasm to Wasmi bytecode. +#[derive(Debug)] +pub struct Stack { + /// The underlying [`Engine`]. + engine: Engine, + /// The Wasm value stack. + operands: OperandStack, + /// The Wasm control stack. + controls: ControlStack, +} + +/// Reusable heap allocations for the [`Stack`]. +#[derive(Debug, Default)] +pub struct StackAllocations { + /// The Wasm value stack. + operands: OperandStack, + /// The Wasm control stack. + controls: ControlStack, +} + +impl Reset for StackAllocations { + fn reset(&mut self) { + self.operands.reset(); + self.controls.reset(); + } +} + +impl ReusableAllocations for Stack { + type Allocations = StackAllocations; + + fn into_allocations(self) -> StackAllocations { + StackAllocations { + operands: self.operands, + controls: self.controls, + } + } +} + +impl Stack { + /// Creates a new empty [`Stack`] from the given `engine`. + pub fn new(engine: &Engine, alloc: StackAllocations) -> Self { + let StackAllocations { operands, controls } = alloc.into_reset(); + Self { + engine: engine.clone(), + operands, + controls, + } + } + + /// Register `amount` local variables of common type `ty`. 
+ /// + /// # Errors + /// + /// If too many local variables are being registered. + pub fn register_locals(&mut self, amount: u32, ty: ValType) -> Result<(), Error> { + self.operands.register_locals(amount, ty) + } + + /// Returns `true` if the control stack is empty. + pub fn is_control_empty(&self) -> bool { + self.controls.is_empty() + } + + /// Returns the current height of the [`Stack`]. + /// + /// # Note + /// + /// The height is equal to the number of [`Operand`]s on the [`Stack`]. + pub fn height(&self) -> usize { + self.operands.height() + } + + /// Returns the maximum height of the [`Stack`]. + /// + /// # Note + /// + /// The height is equal to the number of [`Operand`]s on the [`Stack`]. + pub fn max_height(&self) -> usize { + self.operands.max_height() + } + + /// Truncates `self` to the target `height`. + /// + /// All operands above `height` are dropped. + /// + /// # Panic + /// + /// If `height` is greater than the current height of `self`. + pub fn trunc(&mut self, height: usize) { + debug_assert!(height <= self.height()); + while self.height() > height { + self.pop(); + } + } + + /// Returns `true` is fuel metering is enabled for the associated [`Engine`]. + fn is_fuel_metering_enabled(&self) -> bool { + self.engine.config().get_consume_fuel() + } + + /// Pushes the function enclosing Wasm `block` onto the [`Stack`]. + /// + /// # Note + /// + /// - If `consume_fuel` is `None` fuel metering is expected to be disabled. + /// - If `consume_fuel` is `Some` fuel metering is expected to be enabled. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. + pub fn push_func_block( + &mut self, + ty: BlockType, + label: LabelRef, + consume_fuel: Option, + ) -> Result<(), Error> { + debug_assert!(self.controls.is_empty()); + debug_assert!(self.is_fuel_metering_enabled() == consume_fuel.is_some()); + self.controls.push_block(ty, 0, label, consume_fuel); + Ok(()) + } + + /// Pushes a Wasm `block` onto the [`Stack`]. + /// + /// # Note + /// + /// This inherits the `consume_fuel` [`Instr`] from the parent [`ControlFrame`]. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. + pub fn push_block(&mut self, ty: BlockType, label: LabelRef) -> Result<(), Error> { + debug_assert!(!self.controls.is_empty()); + let len_params = usize::from(ty.len_params(&self.engine)); + let block_height = self.height() - len_params; + let fuel_metering = self.is_fuel_metering_enabled(); + let consume_fuel = match fuel_metering { + true => { + let consume_instr = self + .controls + .get(0) + .consume_fuel_instr() + .expect("control frame must have consume instructions"); + Some(consume_instr) + } + false => None, + }; + self.controls + .push_block(ty, block_height, label, consume_fuel); + Ok(()) + } + + /// Pushes a Wasm `loop` onto the [`Stack`]. + /// + /// # Panics (debug) + /// + /// - If `consume_fuel` is `None` and fuel metering is enabled. + /// - If any of the Wasm `loop` operand parameters are _not_ [`Operand::Temp`]. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. 
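A tiny sketch, with hypothetical simplified types, of two details the methods above encode: a frame records the operand stack height below its parameters, and a plain `block` reuses the enclosing frame's `ConsumeFuel` instruction instead of allocating its own.

```rust
/// Stand-in for the index of a `ConsumeFuel` instruction.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct FuelInstr(usize);

struct Frame {
    /// Operand stack height when the frame was entered, excluding its parameters.
    height: usize,
    /// Inherited from the parent frame for plain `block`s; `None` without fuel metering.
    consume_fuel: Option<FuelInstr>,
}

fn push_block(
    stack_height: usize,
    len_params: usize,
    parent_consume_fuel: Option<FuelInstr>,
) -> Frame {
    Frame {
        // The parameters are not counted into the recorded entry height.
        height: stack_height - len_params,
        // A `block` does not start a new fuel-metered region of its own.
        consume_fuel: parent_consume_fuel,
    }
}

fn main() {
    let frame = push_block(5, 2, Some(FuelInstr(0)));
    assert_eq!(frame.height, 3);
    assert_eq!(frame.consume_fuel, Some(FuelInstr(0)));
}
```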
+ pub fn push_loop( + &mut self, + ty: BlockType, + label: LabelRef, + consume_fuel: Option, + ) -> Result<(), Error> { + debug_assert!(!self.controls.is_empty()); + debug_assert!(self.is_fuel_metering_enabled() == consume_fuel.is_some()); + let len_params = usize::from(ty.len_params(&self.engine)); + let block_height = self.height() - len_params; + debug_assert!(self + .operands + .peek(len_params) + .all(|operand| operand.is_temp())); + self.controls + .push_loop(ty, block_height, label, consume_fuel); + Ok(()) + } + + /// Pushes a Wasm `if` onto the [`Stack`]. + /// + /// # Panics (debug) + /// + /// If `consume_fuel` is `None` and fuel metering is enabled. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. + pub fn push_if( + &mut self, + ty: BlockType, + label: LabelRef, + reachability: IfReachability, + consume_fuel: Option, + ) -> Result<(), Error> { + debug_assert!(!self.controls.is_empty()); + debug_assert!(self.is_fuel_metering_enabled() == consume_fuel.is_some()); + let len_params = usize::from(ty.len_params(&self.engine)); + let block_height = self.height() - len_params; + let else_operands = self.operands.peek(len_params); + debug_assert!(len_params == else_operands.len()); + self.controls.push_if( + ty, + block_height, + label, + consume_fuel, + reachability, + else_operands, + ); + Ok(()) + } + + /// Pushes a Wasm `else` onto the [`Stack`]. + /// + /// # Panics (debug) + /// + /// If `consume_fuel` is `None` and fuel metering is enabled. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. + pub fn push_else( + &mut self, + if_frame: IfControlFrame, + is_end_of_then_reachable: bool, + consume_fuel: Option, + ) -> Result<(), Error> { + debug_assert!(self.is_fuel_metering_enabled() == consume_fuel.is_some()); + self.push_else_operands(&if_frame)?; + self.controls + .push_else(if_frame, consume_fuel, is_end_of_then_reachable); + Ok(()) + } + + /// Pushes an unreachable Wasm control onto the [`Stack`]. + /// + /// # Errors + /// + /// If the stack height exceeds the maximum height. + pub fn push_unreachable(&mut self, kind: ControlFrameKind) -> Result<(), Error> { + self.controls.push_unreachable(kind); + Ok(()) + } + + /// Pops the top-most control frame from the control stack and returns it. + /// + /// # Panics + /// + /// If the control stack is empty. + pub fn pop_control(&mut self) -> ControlFrame { + self.controls + .pop() + .unwrap_or_else(|| panic!("tried to pop control from empty control stack")) + } + + /// Pushes the top-most `else` operands from the control stack onto the operand stack. + /// + /// # Panics (Debug) + /// + /// If the `else` operands are not in orphaned state. + pub fn push_else_operands(&mut self, frame: &IfControlFrame) -> Result<(), Error> { + match frame.reachability() { + IfReachability::Both { .. } => {} + IfReachability::OnlyThen | IfReachability::OnlyElse => return Ok(()), + }; + self.trunc(frame.height()); + for else_operand in self.controls.pop_else_operands() { + self.operands.push_operand(else_operand)?; + } + Ok(()) + } + + /// Returns a shared reference to the [`ControlFrame`] at `depth`. + /// + /// # Panics + /// + /// If `depth` is out of bounds for `self`. + pub fn peek_control(&self, depth: usize) -> &ControlFrame { + self.controls.get(depth) + } + + /// Returns an exclusive reference to the [`ControlFrame`] at `depth`. 
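A self-contained sketch (made-up minimal types, not the wasmi ones) of the `else`-operand handling described above: entering an `if` snapshots the block parameters, and entering the `else` truncates the operand stack back to the frame's entry height and re-pushes the snapshot so the `else` arm sees the same inputs as the `then` arm did.

```rust
#[derive(Debug, Clone, PartialEq)]
enum Operand { Imm(i64), Temp(usize) }

struct IfFrame {
    /// Stack height upon entering the `if`, excluding its parameters.
    height: usize,
    /// Snapshot of the `if` parameters, restored when the `else` begins.
    else_operands: Vec<Operand>,
}

fn enter_if(stack: &[Operand], len_params: usize) -> IfFrame {
    let height = stack.len() - len_params;
    IfFrame {
        height,
        else_operands: stack[height..].to_vec(),
    }
}

fn enter_else(stack: &mut Vec<Operand>, frame: &IfFrame) {
    // Drop whatever the `then` arm left behind ...
    stack.truncate(frame.height);
    // ... and restore the original parameters for the `else` arm.
    stack.extend(frame.else_operands.iter().cloned());
}

fn main() {
    let mut stack = vec![Operand::Temp(0), Operand::Imm(1), Operand::Imm(2)];
    let frame = enter_if(&stack, 2);
    // The `then` arm consumes the parameters and produces one result.
    stack.truncate(frame.height);
    stack.push(Operand::Temp(1));
    enter_else(&mut stack, &frame);
    assert_eq!(stack, vec![Operand::Temp(0), Operand::Imm(1), Operand::Imm(2)]);
}
```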
+ /// + /// # Note + /// + /// This returns an [`AcquiredTarget`] to differentiate between the function + /// body Wasm `block` and other control frames in order to know whether a branching + /// target returns or branches. + /// + /// # Panics + /// + /// If `depth` is out of bounds for `self`. + pub fn peek_control_mut(&mut self, depth: usize) -> AcquiredTarget<'_> { + self.controls.acquire_target(depth) + } + + /// Pushes the [`Operand`] back to the [`Stack`]. + /// + /// Returns the new [`OperandIdx`]. + /// + /// # Errors + /// + /// - If too many operands have been pushed onto the [`Stack`]. + /// - If the local with `local_idx` does not exist. + pub fn push_operand(&mut self, operand: Operand) -> Result { + self.operands.push_operand(operand) + } + + /// Pushes a local variable with index `local_idx` to the [`Stack`]. + /// + /// # Errors + /// + /// - If too many operands have been pushed onto the [`Stack`]. + /// - If the local with `local_idx` does not exist. + pub fn push_local(&mut self, local_index: LocalIdx) -> Result { + self.operands.push_local(local_index) + } + + /// Pushes a temporary with type `ty` on the [`Stack`]. + /// + /// # Errors + /// + /// If too many operands have been pushed onto the [`Stack`]. + pub fn push_temp(&mut self, ty: ValType, instr: Option) -> Result { + self.operands.push_temp(ty, instr) + } + + /// Pushes an immediate `value` on the [`Stack`]. + /// + /// # Errors + /// + /// If too many operands have been pushed onto the [`Stack`]. + pub fn push_immediate(&mut self, value: impl Into) -> Result { + self.operands.push_immediate(value) + } + + /// Peeks the [`Operand`] at `depth`. + /// + /// # Note + /// + /// A depth of 0 peeks the top-most [`Operand`] on `self`. + /// + /// # Panics + /// + /// If `depth` is out of bounds for `self`. + pub fn peek(&self, depth: usize) -> Operand { + self.operands.get(depth) + } + + /// Peeks the 2 top-most [`Operand`]s. + /// + /// # Panics + /// + /// If there aren't at least 2 [`Operand`]s on the [`Stack`]. + pub fn peek2(&self) -> (Operand, Operand) { + let v0 = self.peek(1); + let v1 = self.peek(0); + (v0, v1) + } + + /// Peeks the 3 top-most [`Operand`]s. + /// + /// # Panics + /// + /// If there aren't at least 2 [`Operand`]s on the [`Stack`]. + pub fn peek3(&self) -> (Operand, Operand, Operand) { + let v0 = self.peek(2); + let v1 = self.peek(1); + let v2 = self.peek(0); + (v0, v1, v2) + } + + /// Peeks the top-most `len` operands from the stack and store them into `buffer`. + /// + /// Operands stored into the buffer are placed in order. + pub fn peek_n(&mut self, len: usize, buffer: &mut Vec) { + buffer.clear(); + buffer.extend((0..len).rev().map(|depth| self.peek(depth))); + } + + /// Pops the top-most [`Operand`] from the [`Stack`]. + /// + /// # Panics + /// + /// If `self` is empty. + pub fn pop(&mut self) -> Operand { + self.operands.pop() + } + + /// Pops the two top-most [`Operand`] from the [`Stack`]. + /// + /// # Note + /// + /// The last returned [`Operand`] is the top-most one. + /// + /// # Panics + /// + /// If `self` does not contain enough operands to pop. + pub fn pop2(&mut self) -> (Operand, Operand) { + let o2 = self.pop(); + let o1 = self.pop(); + (o1, o2) + } + + /// Pops the two top-most [`Operand`] from the [`Stack`]. + /// + /// # Note + /// + /// The last returned [`Operand`] is the top-most one. + /// + /// # Panics + /// + /// If `self` does not contain enough operands to pop. 
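A quick illustration, on a toy stack rather than the wasmi one, of the ordering convention used by `pop2`/`pop3` above: operands come back in bottom-to-top order, so the last element of the returned tuple is the former top of the stack.

```rust
fn pop2<T>(stack: &mut Vec<T>) -> (T, T) {
    // Pop the top first, but return it last so the tuple reads bottom-to-top.
    let top = stack.pop().expect("stack must hold at least 2 operands");
    let below = stack.pop().expect("stack must hold at least 2 operands");
    (below, top)
}

fn main() {
    let mut stack = vec!["lhs", "rhs"];
    let (lhs, rhs) = pop2(&mut stack);
    assert_eq!((lhs, rhs), ("lhs", "rhs"));
    assert!(stack.is_empty());
}
```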
+ pub fn pop3(&mut self) -> (Operand, Operand, Operand) { + let o3 = self.pop(); + let o2 = self.pop(); + let o1 = self.pop(); + (o1, o2, o3) + } + + /// Pops `len` operands from the stack and store them into `buffer`. + /// + /// Operands stored into the buffer are placed in order. + pub fn pop_n(&mut self, len: usize, buffer: &mut Vec) { + buffer.clear(); + for _ in 0..len { + let operand = self.pop(); + buffer.push(operand); + } + buffer.reverse(); + } + + /// Preserve all locals on the [`Stack`] that refer to `local_index`. + /// + /// This is done by converting those locals to [`Operand::Temp`] and yielding them. + /// + /// # Note + /// + /// The users must fully consume all items yielded by the returned iterator in order + /// for the local preservation to take full effect. + /// + /// # Panics + /// + /// If the local at `local_index` is out of bounds. + #[must_use] + pub fn preserve_locals(&mut self, local_index: LocalIdx) -> PreservedLocalsIter<'_> { + self.operands.preserve_locals(local_index) + } + + /// Converts and returns the [`Operand`] at `depth` into a [`Operand::Temp`]. + /// + /// # Note + /// + /// - Returns the [`Operand`] at `depth` before being converted to an [`Operand::Temp`]. + /// - [`Operand::Temp`] will have their optional `instr` set to `None`. + /// + /// # Panics + /// + /// If `depth` is out of bounds for the [`Stack`] of operands. + #[must_use] + pub fn operand_to_temp(&mut self, depth: usize) -> Operand { + self.operands.operand_to_temp(depth) + } + + /// Returns the current [`Instruction::ConsumeFuel`] if fuel metering is enabled. + /// + /// Returns `None` otherwise. + pub fn consume_fuel_instr(&self) -> Option { + debug_assert!(!self.controls.is_empty()); + self.controls.get(0).consume_fuel_instr() + } +} diff --git a/crates/wasmi/src/engine/translator/func2/stack/operand.rs b/crates/wasmi/src/engine/translator/func2/stack/operand.rs new file mode 100644 index 0000000000..7e396f5968 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/stack/operand.rs @@ -0,0 +1,201 @@ +use super::{LocalIdx, LocalsRegistry, OperandIdx, StackOperand}; +use crate::{ + core::{TypedVal, ValType}, + engine::translator::utils::Instr, +}; + +#[cfg(doc)] +use super::Stack; + +/// An operand on the [`Stack`]. +#[derive(Debug, Copy, Clone)] +pub enum Operand { + /// A local variable operand. + Local(LocalOperand), + /// A temporary operand. + Temp(TempOperand), + /// An immediate value operand. + Immediate(ImmediateOperand), +} + +impl Operand { + /// Creates a new [`Operand`] from the given [`StackOperand`] and its [`OperandIdx`]. + pub(super) fn new(index: OperandIdx, operand: StackOperand, locals: &LocalsRegistry) -> Self { + match operand { + StackOperand::Local { local_index, .. } => Self::local(index, local_index, locals), + StackOperand::Temp { ty, instr } => Self::temp(index, ty, instr), + StackOperand::Immediate { val } => Self::immediate(index, val), + } + } + + /// Returns `true` if `self` and `other` evaluate to the same value. + pub fn is_same(&self, other: &Self) -> bool { + match (self, other) { + (Operand::Local(lhs), Operand::Local(rhs)) => lhs.local_index() == rhs.local_index(), + (Operand::Temp(lhs), Operand::Temp(rhs)) => lhs.operand_index() == rhs.operand_index(), + (Operand::Immediate(lhs), Operand::Immediate(rhs)) => lhs.val() == rhs.val(), + _ => false, + } + } + + /// Creates a local [`Operand`]. 
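The equivalence notion implemented by `Operand::is_same` above, restated as a standalone sketch with simplified types: two locals denote the same value if they refer to the same local slot, two temporaries only if they are literally the same stack slot, and two immediates if their values are equal.

```rust
enum Operand {
    Local { local_index: u32 },
    Temp { operand_index: usize },
    Imm { val: i64 },
}

fn is_same(a: &Operand, b: &Operand) -> bool {
    match (a, b) {
        // Two uses of the same local always evaluate to the same value.
        (Operand::Local { local_index: l }, Operand::Local { local_index: r }) => l == r,
        // Temporaries are only "the same" if they are the very same stack slot.
        (Operand::Temp { operand_index: l }, Operand::Temp { operand_index: r }) => l == r,
        // Immediates compare by value.
        (Operand::Imm { val: l }, Operand::Imm { val: r }) => l == r,
        _ => false,
    }
}

fn main() {
    assert!(is_same(&Operand::Local { local_index: 3 }, &Operand::Local { local_index: 3 }));
    assert!(!is_same(&Operand::Temp { operand_index: 0 }, &Operand::Temp { operand_index: 1 }));
    assert!(is_same(&Operand::Imm { val: 42 }, &Operand::Imm { val: 42 }));
    assert!(!is_same(&Operand::Local { local_index: 0 }, &Operand::Imm { val: 0 }));
}
```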
+ pub(super) fn local( + operand_index: OperandIdx, + local_index: LocalIdx, + locals: &LocalsRegistry, + ) -> Self { + let ty = locals.ty(local_index); + Self::Local(LocalOperand { + operand_index, + local_index, + ty, + }) + } + + /// Creates a temporary [`Operand`]. + pub(super) fn temp(operand_index: OperandIdx, ty: ValType, instr: Option) -> Self { + Self::Temp(TempOperand { + operand_index, + ty, + instr, + }) + } + + /// Creates an immediate [`Operand`]. + pub(super) fn immediate(operand_index: OperandIdx, val: TypedVal) -> Self { + Self::Immediate(ImmediateOperand { operand_index, val }) + } + + /// Returns `true` if `self` is an [`Operand::Local`]. + pub fn is_local(&self) -> bool { + matches!(self, Self::Local(_)) + } + + /// Returns `true` if `self` is an [`Operand::Temp`]. + pub fn is_temp(&self) -> bool { + matches!(self, Self::Temp(_)) + } + + /// Returns `true` if `self` is an [`Operand::Immediate`]. + pub fn is_immediate(&self) -> bool { + matches!(self, Self::Immediate(_)) + } + + /// Returns the [`OperandIdx`] of the [`Operand`]. + pub fn index(&self) -> OperandIdx { + match self { + Operand::Local(operand) => operand.operand_index(), + Operand::Temp(operand) => operand.operand_index(), + Operand::Immediate(operand) => operand.operand_index(), + } + } + + /// Returns the type of the [`Operand`]. + pub fn ty(&self) -> ValType { + match self { + Self::Local(operand) => operand.ty(), + Self::Temp(operand) => operand.ty(), + Self::Immediate(operand) => operand.ty(), + } + } +} + +/// A local variable on the [`Stack`]. +#[derive(Debug, Copy, Clone)] +pub struct LocalOperand { + /// The index of the operand. + operand_index: OperandIdx, + /// The index of the local variable. + local_index: LocalIdx, + /// The type of the local variable. + ty: ValType, +} + +impl From for Operand { + fn from(operand: LocalOperand) -> Self { + Self::Local(operand) + } +} + +impl LocalOperand { + /// Returns the operand index of the [`LocalOperand`]. + pub fn operand_index(&self) -> OperandIdx { + self.operand_index + } + + /// Returns the index of the [`LocalOperand`]. + pub fn local_index(&self) -> LocalIdx { + self.local_index + } + + /// Returns the type of the [`LocalOperand`]. + pub fn ty(&self) -> ValType { + self.ty + } +} + +/// A temporary on the [`Stack`]. +#[derive(Debug, Copy, Clone)] +pub struct TempOperand { + /// The index of the operand. + operand_index: OperandIdx, + /// The type of the temporary. + ty: ValType, + /// The instruction which created this [`TempOperand`] as its result if any. + instr: Option, +} + +impl From for Operand { + fn from(operand: TempOperand) -> Self { + Self::Temp(operand) + } +} + +impl TempOperand { + /// Returns the operand index of the [`TempOperand`]. + pub fn operand_index(&self) -> OperandIdx { + self.operand_index + } + + /// Returns the type of the [`TempOperand`]. + pub fn ty(&self) -> ValType { + self.ty + } + + /// Returns the instruction whcih created this [`TempOperand`] as its result if any. + pub fn instr(&self) -> Option { + self.instr + } +} + +/// An immediate value on the [`Stack`]. +#[derive(Debug, Copy, Clone)] +pub struct ImmediateOperand { + /// The index of the operand. + operand_index: OperandIdx, + /// The value and type of the immediate value. + val: TypedVal, +} + +impl From for Operand { + fn from(operand: ImmediateOperand) -> Self { + Self::Immediate(operand) + } +} + +impl ImmediateOperand { + /// Returns the operand index of the [`ImmediateOperand`]. 
+ pub fn operand_index(&self) -> OperandIdx { + self.operand_index + } + + /// Returns the immediate value (and its type) of the [`ImmediateOperand`]. + pub fn val(&self) -> TypedVal { + self.val + } + + /// Returns the type of the [`ImmediateOperand`]. + pub fn ty(&self) -> ValType { + self.val.ty() + } +} diff --git a/crates/wasmi/src/engine/translator/func2/stack/operands.rs b/crates/wasmi/src/engine/translator/func2/stack/operands.rs new file mode 100644 index 0000000000..a5d59659bb --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/stack/operands.rs @@ -0,0 +1,425 @@ +use super::{LocalIdx, LocalsRegistry, Operand, Reset}; +use crate::{ + core::{TypedVal, ValType}, + engine::translator::utils::Instr, + Error, +}; +use alloc::vec::Vec; +use core::{num::NonZero, slice}; + +/// A [`StackOperand`] or [`Operand`] index on the [`OperandStack`]. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub struct OperandIdx(NonZero); + +impl From for usize { + fn from(value: OperandIdx) -> Self { + value.0.get().wrapping_sub(1) + } +} + +impl From for OperandIdx { + fn from(value: usize) -> Self { + let Some(operand_idx) = NonZero::new(value.wrapping_add(1)) else { + panic!("out of bounds `OperandIdx`: {value}") + }; + Self(operand_idx) + } +} + +/// An [`Operand`] on the [`OperandStack`]. +/// +/// This is the internal version of [`Operand`] with information that shall remain +/// hidden to the outside. +#[derive(Debug, Copy, Clone)] +pub enum StackOperand { + /// A local variable. + Local { + /// The index of the local variable. + local_index: LocalIdx, + /// The previous [`StackOperand::Local`] on the [`OperandStack`]. + prev_local: Option, + /// The next [`StackOperand::Local`] on the [`OperandStack`]. + next_local: Option, + }, + /// A temporary value on the [`OperandStack`]. + Temp { + /// The type of the temporary value. + ty: ValType, + /// The instruction which has this [`StackOperand`] as result if any. + instr: Option, + }, + /// An immediate value on the [`OperandStack`]. + Immediate { + /// The value (and type) of the immediate value. + val: TypedVal, + }, +} + +impl StackOperand { + /// Returns the [`ValType`] of the [`StackOperand`]. + pub fn ty(&self, locals: &LocalsRegistry) -> ValType { + match self { + StackOperand::Temp { ty, .. } => *ty, + StackOperand::Immediate { val } => val.ty(), + StackOperand::Local { local_index, .. } => locals.ty(*local_index), + } + } +} + +/// The Wasm operand (or value) stack. +#[derive(Debug, Default)] +pub struct OperandStack { + /// The current set of operands on the [`OperandStack`]. + operands: Vec, + /// All function locals and their associated types. + /// + /// Used to query types of locals and their first local on the [`OperandStack`]. + locals: LocalsRegistry, + /// The maximum height of the [`OperandStack`]. + max_height: usize, +} + +impl Reset for OperandStack { + fn reset(&mut self) { + self.operands.clear(); + self.locals.reset(); + self.max_height = 0; + } +} + +impl OperandStack { + /// Register `amount` local variables of common type `ty`. + /// + /// # Errors + /// + /// If too many local variables are being registered. + pub fn register_locals(&mut self, amount: u32, ty: ValType) -> Result<(), Error> { + self.locals.register(amount, ty)?; + Ok(()) + } + + /// Returns the current height of `self` + /// + /// # Note + /// + /// The height is equal to the number of [`StackOperand`]s on `self`. + pub fn height(&self) -> usize { + self.operands.len() + } + + /// Returns the maximum height of `self`. 
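The `OperandIdx` conversions above store the index with a `+1` bias inside a `NonZero` so that `Option<OperandIdx>` needs no extra space: the all-zero bit pattern is left free for `None`. A standalone sketch of the same trick, assuming `NonZero<usize>` as in the surrounding code:

```rust
use core::num::NonZero;

/// Index biased by +1 so that zero is free to represent `None`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Idx(NonZero<usize>);

impl From<usize> for Idx {
    fn from(value: usize) -> Self {
        // `wrapping_add` keeps `usize::MAX` from silently becoming a valid index.
        let Some(non_zero) = NonZero::new(value.wrapping_add(1)) else {
            panic!("out of bounds index: {value}");
        };
        Self(non_zero)
    }
}

impl From<Idx> for usize {
    fn from(idx: Idx) -> Self {
        idx.0.get().wrapping_sub(1)
    }
}

fn main() {
    // `Option<Idx>` is as small as a plain `usize` thanks to the niche.
    assert_eq!(std::mem::size_of::<Option<Idx>>(), std::mem::size_of::<usize>());
    let idx = Idx::from(0);
    assert_eq!(usize::from(idx), 0);
}
```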
+ /// + /// # Note + /// + /// The height is equal to the number of [`Operand`]s on `self`. + pub fn max_height(&self) -> usize { + self.max_height + } + + /// Updates the maximum stack height if needed. + fn update_max_stack_height(&mut self) { + self.max_height = core::cmp::max(self.max_height, self.height()); + } + + /// Returns the [`OperandIdx`] of the next pushed operand. + fn next_index(&self) -> OperandIdx { + OperandIdx::from(self.operands.len()) + } + + /// Returns the [`OperandIdx`] of the operand at `depth`. + fn depth_to_index(&self, depth: usize) -> OperandIdx { + OperandIdx::from(self.height() - depth - 1) + } + + /// Pushes the [`Operand`] back to the [`OperandStack`]. + /// + /// Returns the new [`OperandIdx`]. + /// + /// # Errors + /// + /// - If too many operands have been pushed onto the [`OperandStack`]. + /// - If the local with `local_idx` does not exist. + pub fn push_operand(&mut self, operand: Operand) -> Result { + match operand { + Operand::Local(operand) => self.push_local(operand.local_index()), + Operand::Temp(operand) => self.push_temp(operand.ty(), operand.instr()), + Operand::Immediate(operand) => self.push_immediate(operand.val()), + } + } + + /// Pushes a local variable with index `local_idx` to the [`OperandStack`]. + /// + /// # Errors + /// + /// - If too many operands have been pushed onto the [`OperandStack`]. + /// - If the local with `local_idx` does not exist. + pub fn push_local(&mut self, local_index: LocalIdx) -> Result { + let operand_index = self.next_index(); + let next_local = self + .locals + .replace_first_operand(local_index, Some(operand_index)); + if let Some(next_local) = next_local { + self.update_prev_local(next_local, Some(operand_index)); + } + self.operands.push(StackOperand::Local { + local_index, + prev_local: None, + next_local, + }); + self.update_max_stack_height(); + Ok(operand_index) + } + + /// Pushes a temporary with type `ty` on the [`OperandStack`]. + /// + /// # Errors + /// + /// If too many operands have been pushed onto the [`OperandStack`]. + pub fn push_temp(&mut self, ty: ValType, instr: Option) -> Result { + let idx = self.next_index(); + self.operands.push(StackOperand::Temp { ty, instr }); + self.update_max_stack_height(); + Ok(idx) + } + + /// Pushes an immediate `value` on the [`OperandStack`]. + /// + /// # Errors + /// + /// If too many operands have been pushed onto the [`OperandStack`]. + pub fn push_immediate(&mut self, value: impl Into) -> Result { + let idx = self.next_index(); + self.operands + .push(StackOperand::Immediate { val: value.into() }); + self.update_max_stack_height(); + Ok(idx) + } + + /// Returns an iterator that yields the last `n` [`Operand`]s. + /// + /// # Panics + /// + /// If `n` is out of bounds for `self`. + pub fn peek(&self, n: usize) -> PeekedOperands<'_> { + let len_operands = self.operands.len(); + let first_index = len_operands - n; + let Some(operands) = self.operands.get(first_index..) else { + return PeekedOperands::empty(&self.locals); + }; + PeekedOperands { + index: first_index, + operands: operands.iter(), + locals: &self.locals, + } + } + + /// Pops the top-most [`StackOperand`] from `self` if any. + /// + /// # Panics + /// + /// If `self` is empty. + pub fn pop(&mut self) -> Operand { + let Some(operand) = self.operands.pop() else { + panic!("tried to pop operand from empty stack"); + }; + let index = self.next_index(); + self.unlink_local(operand); + Operand::new(index, operand, &self.locals) + } + + /// Returns the [`Operand`] at `depth`. 
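The `push_local` bookkeeping above threads every stack slot that refers to the same local into an intrusive linked list, with the registry holding the most recently pushed slot as the list head. A reduced sketch of just that threading, with hypothetical types and only the push direction and forward walk shown (the real code also maintains `prev_local` back-links):

```rust
struct Slot {
    local_index: u32,
    /// Next (older) stack slot referring to the same local, if any.
    next_local: Option<usize>,
}

struct Stack {
    slots: Vec<Slot>,
    /// Head of the per-local list: the most recently pushed slot for each local.
    first_slot_of_local: Vec<Option<usize>>,
}

impl Stack {
    fn new(num_locals: usize) -> Self {
        Self { slots: Vec::new(), first_slot_of_local: vec![None; num_locals] }
    }

    fn push_local(&mut self, local_index: u32) {
        let slot_index = self.slots.len();
        // The new slot becomes the head; the old head becomes its successor.
        let next_local = self.first_slot_of_local[local_index as usize].replace(slot_index);
        self.slots.push(Slot { local_index, next_local });
    }

    /// Walks every stack slot that currently refers to `local_index`, newest first.
    fn slots_of_local(&self, local_index: u32) -> Vec<usize> {
        let mut out = Vec::new();
        let mut cursor = self.first_slot_of_local[local_index as usize];
        while let Some(slot) = cursor {
            debug_assert_eq!(self.slots[slot].local_index, local_index);
            out.push(slot);
            cursor = self.slots[slot].next_local;
        }
        out
    }
}

fn main() {
    let mut stack = Stack::new(2);
    stack.push_local(0); // slot 0
    stack.push_local(1); // slot 1
    stack.push_local(0); // slot 2
    // Slots referring to local 0, newest first: 2 then 0.
    assert_eq!(stack.slots_of_local(0), vec![2, 0]);
}
```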
+ /// + /// # Panics + /// + /// If `depth` is out of bounds for `self`. + pub fn get(&self, depth: usize) -> Operand { + let index = self.depth_to_index(depth); + let operand = self.get_at(index); + Operand::new(index, operand, &self.locals) + } + + /// Returns the [`StackOperand`] at `index`. + /// + /// # Panics + /// + /// If `index` is out of bounds for `self`. + fn get_at(&self, index: OperandIdx) -> StackOperand { + self.operands[usize::from(index)] + } + + /// Converts and returns the [`Operand`] at `depth` into a [`Operand::Temp`]. + /// + /// # Note + /// + /// - Returns the [`Operand`] at `depth` before being converted to an [`Operand::Temp`]. + /// - [`Operand::Temp`] will have their optional `instr` set to `None`. + /// + /// # Panics + /// + /// If `depth` is out of bounds for the [`OperandStack`] of operands. + #[must_use] + pub fn operand_to_temp(&mut self, depth: usize) -> Operand { + let index = self.depth_to_index(depth); + let operand = self.operand_to_temp_at(index); + Operand::new(index, operand, &self.locals) + } + + /// Converts and returns the [`StackOperand`] at `index` into a [`StackOperand::Temp`]. + /// + /// # Note + /// + /// - Returns the [`Operand`] at `index` before being converted to an [`Operand::Temp`]. + /// - [`Operand::Temp`] will have their optional `instr` set to `None`. + /// + /// # Panics + /// + /// If `index` is out of bounds for `self`. + #[must_use] + fn operand_to_temp_at(&mut self, index: OperandIdx) -> StackOperand { + let operand = self.get_at(index); + let ty = operand.ty(&self.locals); + self.unlink_local(operand); + self.operands[usize::from(index)] = StackOperand::Temp { ty, instr: None }; + operand + } + + /// Preserve all locals on the [`OperandStack`] that refer to `local_index`. + /// + /// This is done by converting those locals to [`StackOperand::Temp`] and yielding them. + /// + /// # Note + /// + /// The users must fully consume all items yielded by the returned iterator in order + /// for the local preservation to take full effect. + /// + /// # Panics + /// + /// If the local at `local_index` is out of bounds. + #[must_use] + pub fn preserve_locals(&mut self, local_index: LocalIdx) -> PreservedLocalsIter<'_> { + let ty = self.locals.ty(local_index); + let index = self.locals.replace_first_operand(local_index, None); + PreservedLocalsIter { + operands: self, + index, + ty, + } + } + + /// Unlinks the [`StackOperand::Local`] `operand` at `index` from `self`. + /// + /// Does nothing if `operand` is not a [`StackOperand::Local`]. + fn unlink_local(&mut self, operand: StackOperand) { + let StackOperand::Local { + local_index, + prev_local, + next_local, + } = operand + else { + return; + }; + if prev_local.is_none() { + self.locals.replace_first_operand(local_index, next_local); + } + if let Some(prev_local) = prev_local { + self.update_next_local(prev_local, next_local); + } + if let Some(next_local) = next_local { + self.update_prev_local(next_local, prev_local); + } + } + + /// Updates the `prev_local` of the [`StackOperand::Local`] at `local_index` to `prev_index`. + /// + /// # Panics + /// + /// - If `local_index` does not refer to a [`StackOperand::Local`]. + /// - If `local_index` is out of bounds of the operand stack. + fn update_prev_local(&mut self, local_index: OperandIdx, prev_index: Option) { + match &mut self.operands[usize::from(local_index)] { + StackOperand::Local { prev_local, .. 
} => { + *prev_local = prev_index; + } + operand => panic!("expected `StackOperand::Local` but found: {operand:?}"), + } + } + + /// Updates the `next_local` of the [`StackOperand::Local`] at `local_index` to `prev_index`. + /// + /// # Panics + /// + /// - If `local_index` does not refer to a [`StackOperand::Local`]. + /// - If `local_index` is out of bounds of the operand stack. + fn update_next_local(&mut self, local_index: OperandIdx, prev_index: Option) { + match &mut self.operands[usize::from(local_index)] { + StackOperand::Local { next_local, .. } => { + *next_local = prev_index; + } + operand => panic!("expected `StackOperand::Local` but found: {operand:?}"), + } + } +} + +/// Iterator yielding preserved local indices while preserving them. +#[derive(Debug)] +pub struct PreservedLocalsIter<'stack> { + /// The underlying operand stack. + operands: &'stack mut OperandStack, + /// The current operand index of the next preserved local if any. + index: Option, + /// Type of local at preserved `local_index`. + ty: ValType, +} + +impl Iterator for PreservedLocalsIter<'_> { + type Item = OperandIdx; + + fn next(&mut self) -> Option { + let index = self.index?; + let operand = self.operands.operand_to_temp_at(index); + self.index = match operand { + StackOperand::Local { next_local, .. } => next_local, + op => panic!("expected `StackOperand::Local` but found: {op:?}"), + }; + Some(index) + } +} + +/// Iterator yielding peeked stack operators. +#[derive(Debug)] +pub struct PeekedOperands<'stack> { + /// The index of the next yielded operand. + index: usize, + /// The iterator of peeked stack operands. + operands: slice::Iter<'stack, StackOperand>, + /// Used to query types of local operands. + locals: &'stack LocalsRegistry, +} + +impl<'stack> PeekedOperands<'stack> { + /// Creates a [`PeekedOperands`] iterator that yields no operands. + pub fn empty(locals: &'stack LocalsRegistry) -> Self { + Self { + index: 0, + operands: [].iter(), + locals, + } + } +} + +impl Iterator for PeekedOperands<'_> { + type Item = Operand; + + fn next(&mut self) -> Option { + let operand = self.operands.next().copied()?; + let index = OperandIdx::from(self.index); + self.index += 1; + Some(Operand::new(index, operand, self.locals)) + } +} + +impl ExactSizeIterator for PeekedOperands<'_> { + fn len(&self) -> usize { + self.operands.len() + } +} diff --git a/crates/wasmi/src/engine/translator/func2/utils.rs b/crates/wasmi/src/engine/translator/func2/utils.rs new file mode 100644 index 0000000000..7211653781 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/utils.rs @@ -0,0 +1,66 @@ +use crate::ir::{Const16, Const32, Reg}; + +/// Bail out early in case the current code is unreachable. +/// +/// # Note +/// +/// - This should be prepended to most Wasm operator translation procedures. +/// - If we are in unreachable code most Wasm translation is skipped. Only +/// certain control flow operators such as `End` are going through the +/// translation process. In particular the `End` operator may end unreachable +/// code blocks. +macro_rules! bail_unreachable { + ($this:ident) => {{ + if !$this.reachable { + return ::core::result::Result::Ok(()); + } + }}; +} + +/// Used to swap operands of binary [`Instruction`] constructor. +/// +/// [`Instruction`]: crate::ir::Instruction +macro_rules! swap_ops { + ($make_instr:path) => {{ + |result: $crate::ir::Reg, lhs, rhs| -> $crate::ir::Instruction { + $make_instr(result, rhs, lhs) + } + }}; +} + +/// Implemented by types that can be reset for reuse. 
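`preserve_locals` above returns an iterator that does its work lazily: each `next()` rewrites one linked slot into a temporary and then steps to the next link, which is why callers must drain it completely. A minimal sketch of that mutate-while-iterating shape, with invented types (the real iterator also reads the local's type from the registry):

```rust
#[derive(Debug, PartialEq)]
enum Slot {
    /// Refers to a local; `next` links the previous slot of the same local.
    Local { next: Option<usize> },
    /// Materialized temporary that no longer tracks the local.
    Temp,
}

struct Preserve<'a> {
    slots: &'a mut Vec<Slot>,
    cursor: Option<usize>,
}

impl Iterator for Preserve<'_> {
    type Item = usize;

    fn next(&mut self) -> Option<usize> {
        let index = self.cursor?;
        // Convert the slot in place and advance along the linked list.
        let next = match std::mem::replace(&mut self.slots[index], Slot::Temp) {
            Slot::Local { next } => next,
            Slot::Temp => panic!("expected a local slot"),
        };
        self.cursor = next;
        Some(index)
    }
}

fn main() {
    // Two slots refer to the same local: slot 2 (the head) links back to slot 0.
    let mut slots = vec![Slot::Local { next: None }, Slot::Temp, Slot::Local { next: Some(0) }];
    let preserved: Vec<usize> = Preserve { cursor: Some(2), slots: &mut slots }.collect();
    assert_eq!(preserved, vec![2, 0]);
    // Only after the iterator is fully consumed are all slots temporaries.
    assert!(slots.iter().all(|s| *s == Slot::Temp));
}
```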
+pub trait Reset: Sized { + /// Resets `self` for reuse. + fn reset(&mut self); + + /// Returns `self` in resetted state. + #[must_use] + fn into_reset(self) -> Self { + let mut this = self; + this.reset(); + this + } +} + +/// Types that have reusable heap allocations. +pub trait ReusableAllocations { + /// The type of the reusable heap allocations. + type Allocations: Default + Reset; + + /// Returns the reusable heap allocations of `self`. + fn into_allocations(self) -> Self::Allocations; +} + +/// A 16-bit encoded input to Wasmi instruction. +pub type Input16 = Input>; + +/// A 32-bit encoded input to Wasmi instruction. +pub type Input32 = Input>; + +/// A concrete input to a Wasmi instruction. +pub enum Input { + /// A [`Reg`] operand. + Reg(Reg), + /// A 16-bit encoded immediate value operand. + Immediate(T), +} diff --git a/crates/wasmi/src/engine/translator/func2/visit.rs b/crates/wasmi/src/engine/translator/func2/visit.rs new file mode 100644 index 0000000000..d7a87f0388 --- /dev/null +++ b/crates/wasmi/src/engine/translator/func2/visit.rs @@ -0,0 +1,1928 @@ +use super::{ControlFrame, ControlFrameKind, FuncTranslator, LocalIdx}; +use crate::{ + core::{wasm, FuelCostsProvider, IndexType, Mutability, TrapCode, TypedVal, ValType, F32, F64}, + engine::{ + translator::func2::{ + op, + stack::{AcquiredTarget, IfReachability}, + ControlFrameBase, + Input, + Operand, + }, + BlockType, + }, + ir::{self, Const16, Instruction}, + module::{self, FuncIdx, MemoryIdx, TableIdx, WasmiValueType}, + Error, + ExternRef, + FuncRef, + FuncType, +}; +use ir::Const32; +use wasmparser::VisitOperator; + +macro_rules! impl_visit_operator { + ( @mvp $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @sign_extension $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @saturating_float_to_int $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @bulk_memory $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @reference_types $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @tail_call $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @wide_arithmetic $($rest:tt)* ) => { + impl_visit_operator!(@@skipped $($rest)*); + }; + ( @@skipped $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $_ann:tt $($rest:tt)* ) => { + // We skip Wasm operators that we already implement manually. + impl_visit_operator!($($rest)*); + }; + ( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $_ann:tt $($rest:tt)* ) => { + // Wildcard match arm for all the other (yet) unsupported Wasm proposals. + fn $visit(&mut self $($(, $arg: $argty)*)?) -> Self::Output { + self.translate_unsupported_operator(stringify!($op)) + } + impl_visit_operator!($($rest)*); + }; + () => {}; +} + +impl FuncTranslator { + /// Called when translating an unsupported Wasm operator. + /// + /// # Note + /// + /// We panic instead of returning an error because unsupported Wasm + /// errors should have been filtered out by the validation procedure + /// already, therefore encountering an unsupported Wasm operator + /// in the function translation procedure can be considered a bug. 
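A compact sketch of how the `Reset`/`ReusableAllocations` pair above is meant to be used, with a simplified translator stand-in rather than the real one: the expensive buffers survive from one function translation to the next, but their contents are cleared before reuse so only the capacity is recycled.

```rust
trait Reset: Sized {
    fn reset(&mut self);
    fn into_reset(mut self) -> Self {
        self.reset();
        self
    }
}

trait ReusableAllocations {
    type Allocations: Default + Reset;
    fn into_allocations(self) -> Self::Allocations;
}

/// Stand-in for a translator that owns a growable instruction buffer.
struct Translator {
    instrs: Vec<u32>,
}

#[derive(Default)]
struct TranslatorAllocations {
    instrs: Vec<u32>,
}

impl Reset for TranslatorAllocations {
    fn reset(&mut self) {
        // Keep the capacity, drop the contents.
        self.instrs.clear();
    }
}

impl ReusableAllocations for Translator {
    type Allocations = TranslatorAllocations;
    fn into_allocations(self) -> TranslatorAllocations {
        TranslatorAllocations { instrs: self.instrs }
    }
}

impl Translator {
    fn new(alloc: TranslatorAllocations) -> Self {
        let TranslatorAllocations { instrs } = alloc.into_reset();
        Self { instrs }
    }
}

fn main() {
    let mut translator = Translator::new(TranslatorAllocations::default());
    translator.instrs.extend([1, 2, 3]);
    let capacity = translator.instrs.capacity();
    // Recycle the buffer for the next function: empty again, capacity retained.
    let translator = Translator::new(translator.into_allocations());
    assert!(translator.instrs.is_empty());
    assert_eq!(translator.instrs.capacity(), capacity);
}
```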
+ pub fn translate_unsupported_operator(&self, name: &str) -> Result<(), Error> { + panic!("tried to translate an unsupported Wasm operator: {name}") + } +} + +impl<'a> VisitOperator<'a> for FuncTranslator { + type Output = Result<(), Error>; + + #[cfg(feature = "simd")] + fn simd_visitor( + &mut self, + ) -> Option<&mut dyn wasmparser::VisitSimdOperator<'a, Output = Self::Output>> { + Some(self) + } + + wasmparser::for_each_visit_operator!(impl_visit_operator); + + fn visit_unreachable(&mut self) -> Self::Output { + bail_unreachable!(self); + self.translate_trap(TrapCode::UnreachableCodeReached) + } + + fn visit_nop(&mut self) -> Self::Output { + Ok(()) + } + + fn visit_block(&mut self, block_ty: wasmparser::BlockType) -> Self::Output { + if !self.reachable { + self.stack.push_unreachable(ControlFrameKind::Block)?; + return Ok(()); + } + let block_ty = BlockType::new(block_ty, &self.module); + let end_label = self.labels.new_label(); + self.stack.push_block(block_ty, end_label)?; + Ok(()) + } + + fn visit_loop(&mut self, block_ty: wasmparser::BlockType) -> Self::Output { + if !self.reachable { + self.stack.push_unreachable(ControlFrameKind::Loop)?; + return Ok(()); + } + let block_ty = BlockType::new(block_ty, &self.module); + let len_params = block_ty.len_params(&self.engine); + let continue_label = self.labels.new_label(); + let consume_fuel = self.stack.consume_fuel_instr(); + self.move_operands_to_temp(usize::from(len_params), consume_fuel)?; + self.pin_label(continue_label); + let consume_fuel = self.instrs.push_consume_fuel_instr()?; + self.stack + .push_loop(block_ty, continue_label, consume_fuel)?; + Ok(()) + } + + fn visit_if(&mut self, block_ty: wasmparser::BlockType) -> Self::Output { + if !self.reachable { + self.stack.push_unreachable(ControlFrameKind::If)?; + return Ok(()); + } + let end_label = self.labels.new_label(); + let condition = self.stack.pop(); + let (reachability, consume_fuel_instr) = match condition { + Operand::Immediate(operand) => { + let condition = i32::from(operand.val()); + let reachability = match condition { + 0 => { + self.reachable = false; + IfReachability::OnlyElse + } + _ => IfReachability::OnlyThen, + }; + let consume_fuel_instr = self.stack.consume_fuel_instr(); + (reachability, consume_fuel_instr) + } + _ => { + let else_label = self.labels.new_label(); + self.encode_br_eqz(condition, else_label)?; + let reachability = IfReachability::Both { else_label }; + let consume_fuel_instr = self.instrs.push_consume_fuel_instr()?; + (reachability, consume_fuel_instr) + } + }; + let block_ty = BlockType::new(block_ty, &self.module); + self.stack + .push_if(block_ty, end_label, reachability, consume_fuel_instr)?; + Ok(()) + } + + fn visit_else(&mut self) -> Self::Output { + let mut frame = match self.stack.pop_control() { + ControlFrame::If(frame) => frame, + ControlFrame::Unreachable(ControlFrameKind::If) => { + debug_assert!(!self.reachable); + self.stack.push_unreachable(ControlFrameKind::Else)?; + return Ok(()); + } + unexpected => panic!("expected `if` control frame but found: {unexpected:?}"), + }; + // After `then` block, before `else` block: + // - Copy `if` branch parameters. + // - Branch from end of `then` to end of `if`. 
+ let is_end_of_then_reachable = self.reachable; + if let IfReachability::Both { else_label } = frame.reachability() { + if is_end_of_then_reachable { + let consume_fuel_instr = frame.consume_fuel_instr(); + self.copy_branch_params(&frame, consume_fuel_instr)?; + frame.branch_to(); + self.encode_br(frame.label())?; + } + // Start of `else` block: + self.labels + .pin_label(else_label, self.instrs.next_instr()) + .unwrap(); + } + let consume_fuel_instr = self.instrs.push_consume_fuel_instr()?; + self.reachable = frame.is_else_reachable(); + self.stack + .push_else(frame, is_end_of_then_reachable, consume_fuel_instr)?; + Ok(()) + } + + fn visit_end(&mut self) -> Self::Output { + match self.stack.pop_control() { + ControlFrame::Block(frame) => self.translate_end_block(frame), + ControlFrame::Loop(frame) => self.translate_end_loop(frame), + ControlFrame::If(frame) => self.translate_end_if(frame), + ControlFrame::Else(frame) => self.translate_end_else(frame), + ControlFrame::Unreachable(frame) => self.translate_end_unreachable(frame), + } + } + + fn visit_br(&mut self, depth: u32) -> Self::Output { + bail_unreachable!(self); + let Ok(depth) = usize::try_from(depth) else { + panic!("out of bounds depth: {depth}") + }; + let consume_fuel_instr = self.stack.consume_fuel_instr(); + match self.stack.peek_control_mut(depth) { + AcquiredTarget::Return(_) => self.visit_return(), + AcquiredTarget::Branch(mut frame) => { + frame.branch_to(); + let label = frame.label(); + let len_params = frame.len_branch_params(&self.engine); + let branch_results = Self::frame_results_impl(&frame, &self.engine, &self.layout)?; + if let Some(branch_results) = branch_results { + self.encode_copies(branch_results, len_params, consume_fuel_instr)?; + } + self.encode_br(label)?; + self.reachable = false; + Ok(()) + } + } + } + + fn visit_br_if(&mut self, depth: u32) -> Self::Output { + bail_unreachable!(self); + let condition = self.stack.pop(); + if let Operand::Immediate(condition) = condition { + if i32::from(condition.val()) != 0 { + // Case (true): always takes the branch + self.visit_br(depth)?; + } + return Ok(()); + } + let Ok(depth) = usize::try_from(depth) else { + panic!("out of bounds depth: {depth}") + }; + let mut frame = self.stack.peek_control_mut(depth).control_frame(); + frame.branch_to(); + let len_branch_params = frame.len_branch_params(&self.engine); + let branch_results = Self::frame_results_impl(&frame, &self.engine, &self.layout)?; + let label = frame.label(); + if len_branch_params == 0 { + // Case: no branch values are required to be copied + self.encode_br_nez(condition, label)?; + return Ok(()); + } + if !self.requires_branch_param_copies(depth) { + // Case: no branch values are required to be copied + self.encode_br_nez(condition, label)?; + return Ok(()); + } + // Case: fallback to copy branch parameters conditionally + let consume_fuel_instr = self.stack.consume_fuel_instr(); + let skip_label = self.labels.new_label(); + self.encode_br_eqz(condition, skip_label)?; + if let Some(branch_results) = branch_results { + self.encode_copies(branch_results, len_branch_params, consume_fuel_instr)?; + } + self.encode_br(label)?; + self.labels + .pin_label(skip_label, self.instrs.next_instr()) + .unwrap(); + Ok(()) + } + + fn visit_br_table(&mut self, table: wasmparser::BrTable<'a>) -> Self::Output { + bail_unreachable!(self); + let index = self.stack.pop(); + let default_target = table.default(); + if table.is_empty() { + // Case: the `br_table` only has a single target `t` which is equal to a `br t`. 
+ return self.visit_br(default_target); + } + if let Operand::Immediate(index) = index { + // Case: the `br_table` index is a constant value, therefore always taking the same branch. + // Note: `usize::MAX` is used to fallback to the default target. + let chosen_index = usize::try_from(u32::from(index.val())).unwrap_or(usize::MAX); + let chosen_target = table + .targets() + .nth(chosen_index) + .transpose()? + .unwrap_or(default_target); + return self.visit_br(chosen_target); + } + Self::copy_targets_from_br_table(&table, &mut self.immediates)?; + let targets = &self.immediates[..]; + if targets + .iter() + .all(|&target| u32::from(target) == default_target) + { + // Case: all targets are the same and thus the `br_table` is equal to a `br`. + return self.visit_br(default_target); + } + // Note: The Wasm spec mandates that all `br_table` targets manipulate the + // Wasm value stack the same. This implies for Wasmi that all `br_table` + // targets have the same branch parameter arity. + let Ok(default_target) = usize::try_from(default_target) else { + panic!("out of bounds `default_target` does not fit into `usize`: {default_target}"); + }; + let index = self.layout.operand_to_reg(index)?; + let len_branch_params = self + .stack + .peek_control(default_target) + .len_branch_params(&self.engine); + match len_branch_params { + 0 => self.encode_br_table_0(table, index)?, + n => self.encode_br_table_n(table, index, n)?, + }; + self.reachable = false; + Ok(()) + } + + fn visit_return(&mut self) -> Self::Output { + bail_unreachable!(self); + let consume_fuel_instr = self.stack.consume_fuel_instr(); + self.encode_return(consume_fuel_instr)?; + let len_results = self.func_type_with(FuncType::len_results); + for _ in 0..len_results { + self.stack.pop(); + } + self.reachable = false; + Ok(()) + } + + fn visit_call(&mut self, function_index: u32) -> Self::Output { + bail_unreachable!(self); + let func_idx = FuncIdx::from(function_index); + let func_type = self.resolve_func_type(func_idx); + let len_params = usize::from(func_type.len_params()); + let results = self.call_regspan(len_params)?; + let instr = match self.module.get_engine_func(func_idx) { + Some(engine_func) => { + // Case: We are calling an internal function and can optimize + // this case by using the special instruction for it. + match len_params { + 0 => Instruction::call_internal_0(results, engine_func), + _ => Instruction::call_internal(results, engine_func), + } + } + None => { + // Case: We are calling an imported function and must use the + // general calling operator for it. + match len_params { + 0 => Instruction::call_imported_0(results, function_index), + _ => Instruction::call_imported(results, function_index), + } + } + }; + let call_instr = self.push_instr(instr, FuelCostsProvider::call)?; + self.stack.pop_n(len_params, &mut self.operands); + self.instrs + .encode_register_list(&self.operands, &mut self.layout)?; + if let Some(span) = self.push_results(call_instr, func_type.results())? { + debug_assert_eq!(span, results); + } + Ok(()) + } + + fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output { + bail_unreachable!(self); + let func_type = self.resolve_type(type_index); + let index = self.stack.pop(); + let indirect_params = self.call_indirect_params(index, table_index)?; + let len_params = usize::from(func_type.len_params()); + let results = self.call_regspan(len_params)?; + let instr = match (len_params, indirect_params) { + (0, Instruction::CallIndirectParams { .. 
}) => { + Instruction::call_indirect_0(results, type_index) + } + (0, Instruction::CallIndirectParamsImm16 { .. }) => { + Instruction::call_indirect_0_imm16(results, type_index) + } + (_, Instruction::CallIndirectParams { .. }) => { + Instruction::call_indirect(results, type_index) + } + (_, Instruction::CallIndirectParamsImm16 { .. }) => { + Instruction::call_indirect_imm16(results, type_index) + } + _ => unreachable!(), + }; + let call_instr = self.push_instr(instr, FuelCostsProvider::call)?; + self.push_param(indirect_params)?; + self.stack.pop_n(len_params, &mut self.operands); + self.instrs + .encode_register_list(&self.operands, &mut self.layout)?; + if let Some(span) = self.push_results(call_instr, func_type.results())? { + debug_assert_eq!(span, results); + } + Ok(()) + } + + fn visit_drop(&mut self) -> Self::Output { + bail_unreachable!(self); + _ = self.stack.pop(); + Ok(()) + } + + fn visit_select(&mut self) -> Self::Output { + self.translate_select(None) + } + + fn visit_local_get(&mut self, local_index: u32) -> Self::Output { + bail_unreachable!(self); + self.stack.push_local(LocalIdx::from(local_index))?; + Ok(()) + } + + fn visit_local_set(&mut self, local_index: u32) -> Self::Output { + self.translate_local_set(local_index, false) + } + + fn visit_local_tee(&mut self, local_index: u32) -> Self::Output { + self.translate_local_set(local_index, true) + } + + fn visit_global_get(&mut self, global_index: u32) -> Self::Output { + bail_unreachable!(self); + let global_idx = module::GlobalIdx::from(global_index); + let (global_type, init_value) = self.module.get_global(global_idx); + let content = global_type.content(); + if let (Mutability::Const, Some(init_expr)) = (global_type.mutability(), init_value) { + if let Some(value) = init_expr.eval_const() { + // Case: access to immutable internally defined global variables + // can be replaced with their constant initialization value. + self.stack.push_immediate(TypedVal::new(content, value))?; + return Ok(()); + } + if let Some(func_index) = init_expr.funcref() { + // Case: forward to `ref.func x` translation. + self.visit_ref_func(func_index.into_u32())?; + return Ok(()); + } + } + // Case: The `global.get` instruction accesses a mutable or imported + // global variable and thus cannot be optimized away. + let global_idx = ir::index::Global::from(global_index); + self.push_instr_with_result( + content, + |result| Instruction::global_get(result, global_idx), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_global_set(&mut self, global_index: u32) -> Self::Output { + bail_unreachable!(self); + let global = ir::index::Global::from(global_index); + let input = match self.stack.pop() { + Operand::Immediate(input) => input.val(), + input => { + // Case: `global.set` with simple register input. + let input = self.layout.operand_to_reg(input)?; + self.push_instr( + Instruction::global_set(input, global), + FuelCostsProvider::instance, + )?; + return Ok(()); + } + }; + // Note: at this point we handle the different immediate `global.set` instructions. + let (global_type, _init_value) = self + .module + .get_global(module::GlobalIdx::from(global_index)); + debug_assert_eq!(global_type.content(), input.ty()); + match global_type.content() { + ValType::I32 => { + if let Ok(value) = Const16::try_from(i32::from(input)) { + // Case: `global.set` with 16-bit encoded `i32` value. 
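+ // Illustrative example: `(global.set $g (i32.const 7))` — the constant
+ // fits into 16 bits, so a single `global_set_i32imm16` instruction is
+ // emitted and no function-local constant has to be allocated.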
+ self.push_instr( + Instruction::global_set_i32imm16(value, global), + FuelCostsProvider::instance, + )?; + return Ok(()); + } + } + ValType::I64 => { + if let Ok(value) = Const16::try_from(i64::from(input)) { + // Case: `global.set` with 16-bit encoded `i64` value. + self.push_instr( + Instruction::global_set_i64imm16(value, global), + FuelCostsProvider::instance, + )?; + return Ok(()); + } + } + _ => {} + }; + // Note: at this point we have to allocate a function local constant. + let cref = self.layout.const_to_reg(input)?; + self.push_instr( + Instruction::global_set(cref, global), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_i32_load(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I32, + Instruction::load32, + Instruction::load32_offset16, + Instruction::load32_at, + ) + } + + fn visit_i64_load(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::load64, + Instruction::load64_offset16, + Instruction::load64_at, + ) + } + + fn visit_f32_load(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::F32, + Instruction::load32, + Instruction::load32_offset16, + Instruction::load32_at, + ) + } + + fn visit_f64_load(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::F64, + Instruction::load64, + Instruction::load64_offset16, + Instruction::load64_at, + ) + } + + fn visit_i32_load8_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I32, + Instruction::i32_load8_s, + Instruction::i32_load8_s_offset16, + Instruction::i32_load8_s_at, + ) + } + + fn visit_i32_load8_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I32, + Instruction::i32_load8_u, + Instruction::i32_load8_u_offset16, + Instruction::i32_load8_u_at, + ) + } + + fn visit_i32_load16_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I32, + Instruction::i32_load16_s, + Instruction::i32_load16_s_offset16, + Instruction::i32_load16_s_at, + ) + } + + fn visit_i32_load16_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I32, + Instruction::i32_load16_u, + Instruction::i32_load16_u_offset16, + Instruction::i32_load16_u_at, + ) + } + + fn visit_i64_load8_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::i64_load8_s, + Instruction::i64_load8_s_offset16, + Instruction::i64_load8_s_at, + ) + } + + fn visit_i64_load8_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::i64_load8_u, + Instruction::i64_load8_u_offset16, + Instruction::i64_load8_u_at, + ) + } + + fn visit_i64_load16_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::i64_load16_s, + Instruction::i64_load16_s_offset16, + Instruction::i64_load16_s_at, + ) + } + + fn visit_i64_load16_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::i64_load16_u, + Instruction::i64_load16_u_offset16, + Instruction::i64_load16_u_at, + ) + } + + fn visit_i64_load32_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + 
Instruction::i64_load32_s, + Instruction::i64_load32_s_offset16, + Instruction::i64_load32_s_at, + ) + } + + fn visit_i64_load32_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_load( + memarg, + ValType::I64, + Instruction::i64_load32_u, + Instruction::i64_load32_u_offset16, + Instruction::i64_load32_u_at, + ) + } + + fn visit_i32_store(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_i64_store(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_f32_store(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_store( + memarg, + Instruction::store32, + Instruction::store32_offset16, + Instruction::store32_at, + ) + } + + fn visit_f64_store(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_store( + memarg, + Instruction::store64, + Instruction::store64_offset16, + Instruction::store64_at, + ) + } + + fn visit_i32_store8(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_i32_store16(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_i64_store8(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_i64_store16(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_i64_store32(&mut self, memarg: wasmparser::MemArg) -> Self::Output { + self.translate_istore_wrap::(memarg) + } + + fn visit_memory_size(&mut self, mem: u32) -> Self::Output { + bail_unreachable!(self); + let index_ty = self + .module + .get_type_of_memory(MemoryIdx::from(mem)) + .index_ty() + .ty(); + self.push_instr_with_result( + index_ty, + |result| Instruction::memory_size(result, mem), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_memory_grow(&mut self, mem: u32) -> Self::Output { + bail_unreachable!(self); + let index_ty = self + .module + .get_type_of_memory(MemoryIdx::from(mem)) + .index_ty(); + let delta = self.stack.pop(); + if let Operand::Immediate(delta) = delta { + let delta = delta.val(); + let delta = match index_ty { + IndexType::I32 => u64::from(u32::from(delta)), + IndexType::I64 => u64::from(delta), + }; + if delta == 0 { + // Case: growing by 0 pages. + // + // Since `memory.grow` returns the `memory.size` before the + // operation a `memory.grow` with `delta` of 0 can be translated + // as `memory.size` instruction instead. 
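+ // For example, `(memory.grow (i32.const 0))` leaves the memory unchanged
+ // but still returns its current size, so it is encoded as a plain
+ // `memory_size` instruction below.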
+ self.push_instr_with_result( + index_ty.ty(), + |result| Instruction::memory_size(result, mem), + FuelCostsProvider::instance, + )?; + return Ok(()); + } + if let Ok(delta) = >::try_from(delta) { + // Case: delta can be 32-bit encoded + self.push_instr_with_result( + index_ty.ty(), + |result| Instruction::memory_grow_imm(result, delta), + FuelCostsProvider::instance, + )?; + self.push_param(Instruction::memory_index(mem))?; + return Ok(()); + } + } + // Case: fallback to generic `memory.grow` instruction + let delta = self.layout.operand_to_reg(delta)?; + self.push_instr_with_result( + index_ty.ty(), + |result| Instruction::memory_grow(result, delta), + FuelCostsProvider::instance, + )?; + self.push_param(Instruction::memory_index(mem))?; + Ok(()) + } + + fn visit_i32_const(&mut self, value: i32) -> Self::Output { + bail_unreachable!(self); + self.stack.push_immediate(value)?; + Ok(()) + } + + fn visit_i64_const(&mut self, value: i64) -> Self::Output { + bail_unreachable!(self); + self.stack.push_immediate(value)?; + Ok(()) + } + + fn visit_f32_const(&mut self, value: wasmparser::Ieee32) -> Self::Output { + bail_unreachable!(self); + let value = F32::from_bits(value.bits()); + self.stack.push_immediate(value)?; + Ok(()) + } + + fn visit_f64_const(&mut self, value: wasmparser::Ieee64) -> Self::Output { + bail_unreachable!(self); + let value = F64::from_bits(value.bits()); + self.stack.push_immediate(value)?; + Ok(()) + } + + fn visit_i32_eqz(&mut self) -> Self::Output { + bail_unreachable!(self); + self.stack.push_immediate(0_i32)?; + self.visit_i32_eq() + } + + fn visit_i32_eq(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_eq, + Instruction::i32_eq_imm16, + wasm::i32_eq, + FuncTranslator::fuse_eqz, + ) + } + + fn visit_i32_ne(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_ne, + Instruction::i32_ne_imm16, + wasm::i32_ne, + FuncTranslator::fuse_nez, + ) + } + + fn visit_i32_lt_s(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i32_lt_s, + Instruction::i32_lt_s_imm16_rhs, + Instruction::i32_lt_s_imm16_lhs, + wasm::i32_lt_s, + ) + } + + fn visit_i32_lt_u(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i32_lt_u, + Instruction::i32_lt_u_imm16_rhs, + Instruction::i32_lt_u_imm16_lhs, + wasm::i32_lt_u, + ) + } + + fn visit_i32_gt_s(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i32_lt_s), + swap_ops!(Instruction::i32_lt_s_imm16_lhs), + swap_ops!(Instruction::i32_lt_s_imm16_rhs), + wasm::i32_gt_s, + ) + } + + fn visit_i32_gt_u(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i32_lt_u), + swap_ops!(Instruction::i32_lt_u_imm16_lhs), + swap_ops!(Instruction::i32_lt_u_imm16_rhs), + wasm::i32_gt_u, + ) + } + + fn visit_i32_le_s(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i32_le_s, + Instruction::i32_le_s_imm16_rhs, + Instruction::i32_le_s_imm16_lhs, + wasm::i32_le_s, + ) + } + + fn visit_i32_le_u(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i32_le_u, + Instruction::i32_le_u_imm16_rhs, + Instruction::i32_le_u_imm16_lhs, + wasm::i32_le_u, + ) + } + + fn visit_i32_ge_s(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i32_le_s), + swap_ops!(Instruction::i32_le_s_imm16_lhs), + swap_ops!(Instruction::i32_le_s_imm16_rhs), + wasm::i32_ge_s, + ) + } + + fn visit_i32_ge_u(&mut self) -> Self::Output { + self.translate_binary::( + 
swap_ops!(Instruction::i32_le_u), + swap_ops!(Instruction::i32_le_u_imm16_lhs), + swap_ops!(Instruction::i32_le_u_imm16_rhs), + wasm::i32_ge_u, + ) + } + + fn visit_i64_eqz(&mut self) -> Self::Output { + bail_unreachable!(self); + self.stack.push_immediate(0_i64)?; + self.visit_i64_eq() + } + + fn visit_i64_eq(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_eq, + Instruction::i64_eq_imm16, + wasm::i64_eq, + FuncTranslator::fuse_eqz, + ) + } + + fn visit_i64_ne(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_ne, + Instruction::i64_ne_imm16, + wasm::i64_ne, + FuncTranslator::fuse_nez, + ) + } + + fn visit_i64_lt_s(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i64_lt_s, + Instruction::i64_lt_s_imm16_rhs, + Instruction::i64_lt_s_imm16_lhs, + wasm::i64_lt_s, + ) + } + + fn visit_i64_lt_u(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i64_lt_u, + Instruction::i64_lt_u_imm16_rhs, + Instruction::i64_lt_u_imm16_lhs, + wasm::i64_lt_u, + ) + } + + fn visit_i64_gt_s(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i64_lt_s), + swap_ops!(Instruction::i64_lt_s_imm16_lhs), + swap_ops!(Instruction::i64_lt_s_imm16_rhs), + wasm::i64_gt_s, + ) + } + + fn visit_i64_gt_u(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i64_lt_u), + swap_ops!(Instruction::i64_lt_u_imm16_lhs), + swap_ops!(Instruction::i64_lt_u_imm16_rhs), + wasm::i64_gt_u, + ) + } + + fn visit_i64_le_s(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i64_le_s, + Instruction::i64_le_s_imm16_rhs, + Instruction::i64_le_s_imm16_lhs, + wasm::i64_le_s, + ) + } + + fn visit_i64_le_u(&mut self) -> Self::Output { + self.translate_binary::( + Instruction::i64_le_u, + Instruction::i64_le_u_imm16_rhs, + Instruction::i64_le_u_imm16_lhs, + wasm::i64_le_u, + ) + } + + fn visit_i64_ge_s(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i64_le_s), + swap_ops!(Instruction::i64_le_s_imm16_lhs), + swap_ops!(Instruction::i64_le_s_imm16_rhs), + wasm::i64_ge_s, + ) + } + + fn visit_i64_ge_u(&mut self) -> Self::Output { + self.translate_binary::( + swap_ops!(Instruction::i64_le_u), + swap_ops!(Instruction::i64_le_u_imm16_lhs), + swap_ops!(Instruction::i64_le_u_imm16_rhs), + wasm::i64_ge_u, + ) + } + + fn visit_f32_eq(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_eq, wasm::f32_eq) + } + + fn visit_f32_ne(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_ne, wasm::f32_ne) + } + + fn visit_f32_lt(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_lt, wasm::f32_lt) + } + + fn visit_f32_gt(&mut self) -> Self::Output { + self.translate_fbinary(swap_ops!(Instruction::f32_lt), wasm::f32_gt) + } + + fn visit_f32_le(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_le, wasm::f32_le) + } + + fn visit_f32_ge(&mut self) -> Self::Output { + self.translate_fbinary(swap_ops!(Instruction::f32_le), wasm::f32_ge) + } + + fn visit_f64_eq(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_eq, wasm::f64_eq) + } + + fn visit_f64_ne(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_ne, wasm::f64_ne) + } + + fn visit_f64_lt(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_lt, wasm::f64_lt) + } + + fn visit_f64_gt(&mut self) -> Self::Output { + self.translate_fbinary(swap_ops!(Instruction::f64_lt), 
wasm::f64_gt) + } + + fn visit_f64_le(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_le, wasm::f64_le) + } + + fn visit_f64_ge(&mut self) -> Self::Output { + self.translate_fbinary(swap_ops!(Instruction::f64_le), wasm::f64_ge) + } + + fn visit_i32_clz(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i32_clz, wasm::i32_clz) + } + + fn visit_i32_ctz(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i32_ctz, wasm::i32_ctz) + } + + fn visit_i32_popcnt(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i32_popcnt, wasm::i32_popcnt) + } + + fn visit_i32_add(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_add, + Instruction::i32_add_imm16, + wasm::i32_add, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i32_sub(&mut self) -> Self::Output { + self.translate_isub( + Instruction::i32_sub, + Instruction::i32_add_imm16, + Instruction::i32_sub_imm16_lhs, + wasm::i32_sub, + ) + } + + fn visit_i32_mul(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_mul, + Instruction::i32_mul_imm16, + wasm::i32_mul, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i32_div_s(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i32_div_s, + Instruction::i32_div_s_imm16_rhs, + Instruction::i32_div_s_imm16_lhs, + wasm::i32_div_s, + ) + } + + fn visit_i32_div_u(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i32_div_u, + Instruction::i32_div_u_imm16_rhs, + Instruction::i32_div_u_imm16_lhs, + wasm::i32_div_u, + ) + } + + fn visit_i32_rem_s(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i32_rem_s, + Instruction::i32_rem_s_imm16_rhs, + Instruction::i32_rem_s_imm16_lhs, + wasm::i32_rem_s, + ) + } + + fn visit_i32_rem_u(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i32_rem_u, + Instruction::i32_rem_u_imm16_rhs, + Instruction::i32_rem_u_imm16_lhs, + wasm::i32_rem_u, + ) + } + + fn visit_i32_and(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_bitand, + Instruction::i32_bitand_imm16, + wasm::i32_bitand, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i32_or(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_bitor, + Instruction::i32_bitor_imm16, + wasm::i32_bitor, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i32_xor(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i32_bitxor, + Instruction::i32_bitxor_imm16, + wasm::i32_bitxor, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i32_shl(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i32_shl, + Instruction::i32_shl_by, + Instruction::i32_shl_imm16, + wasm::i32_shl, + ) + } + + fn visit_i32_shr_s(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i32_shr_s, + Instruction::i32_shr_s_by, + Instruction::i32_shr_s_imm16, + wasm::i32_shr_s, + ) + } + + fn visit_i32_shr_u(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i32_shr_u, + Instruction::i32_shr_u_by, + Instruction::i32_shr_u_imm16, + wasm::i32_shr_u, + ) + } + + fn visit_i32_rotl(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i32_rotl, + Instruction::i32_rotl_by, + Instruction::i32_rotl_imm16, + wasm::i32_rotl, + ) + } + + fn visit_i32_rotr(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i32_rotr, + Instruction::i32_rotr_by, + Instruction::i32_rotr_imm16, + wasm::i32_rotr, 
+ ) + } + + fn visit_i64_clz(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i64_clz, wasm::i64_clz) + } + + fn visit_i64_ctz(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i64_ctz, wasm::i64_ctz) + } + + fn visit_i64_popcnt(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i64_popcnt, wasm::i64_popcnt) + } + + fn visit_i64_add(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_add, + Instruction::i64_add_imm16, + wasm::i64_add, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i64_sub(&mut self) -> Self::Output { + self.translate_isub( + Instruction::i64_sub, + Instruction::i64_add_imm16, + Instruction::i64_sub_imm16_lhs, + wasm::i64_sub, + ) + } + + fn visit_i64_mul(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_mul, + Instruction::i64_mul_imm16, + wasm::i64_mul, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i64_div_s(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i64_div_s, + Instruction::i64_div_s_imm16_rhs, + Instruction::i64_div_s_imm16_lhs, + wasm::i64_div_s, + ) + } + + fn visit_i64_div_u(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i64_div_u, + Instruction::i64_div_u_imm16_rhs, + Instruction::i64_div_u_imm16_lhs, + wasm::i64_div_u, + ) + } + + fn visit_i64_rem_s(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i64_rem_s, + Instruction::i64_rem_s_imm16_rhs, + Instruction::i64_rem_s_imm16_lhs, + wasm::i64_rem_s, + ) + } + + fn visit_i64_rem_u(&mut self) -> Self::Output { + self.translate_divrem::( + Instruction::i64_rem_u, + Instruction::i64_rem_u_imm16_rhs, + Instruction::i64_rem_u_imm16_lhs, + wasm::i64_rem_u, + ) + } + + fn visit_i64_and(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_bitand, + Instruction::i64_bitand_imm16, + wasm::i64_bitand, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i64_or(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_bitor, + Instruction::i64_bitor_imm16, + wasm::i64_bitor, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i64_xor(&mut self) -> Self::Output { + self.translate_binary_commutative::( + Instruction::i64_bitxor, + Instruction::i64_bitxor_imm16, + wasm::i64_bitxor, + FuncTranslator::no_opt_ri, + ) + } + + fn visit_i64_shl(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i64_shl, + Instruction::i64_shl_by, + Instruction::i64_shl_imm16, + wasm::i64_shl, + ) + } + + fn visit_i64_shr_s(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i64_shr_s, + Instruction::i64_shr_s_by, + Instruction::i64_shr_s_imm16, + wasm::i64_shr_s, + ) + } + + fn visit_i64_shr_u(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i64_shr_u, + Instruction::i64_shr_u_by, + Instruction::i64_shr_u_imm16, + wasm::i64_shr_u, + ) + } + + fn visit_i64_rotl(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i64_rotl, + Instruction::i64_rotl_by, + Instruction::i64_rotl_imm16, + wasm::i64_rotl, + ) + } + + fn visit_i64_rotr(&mut self) -> Self::Output { + self.translate_shift::( + Instruction::i64_rotr, + Instruction::i64_rotr_by, + Instruction::i64_rotr_imm16, + wasm::i64_rotr, + ) + } + + fn visit_f32_abs(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_abs, wasm::f32_abs) + } + + fn visit_f32_neg(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_neg, wasm::f32_neg) + } + + fn 
visit_f32_ceil(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_ceil, wasm::f32_ceil) + } + + fn visit_f32_floor(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_floor, wasm::f32_floor) + } + + fn visit_f32_trunc(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_trunc, wasm::f32_trunc) + } + + fn visit_f32_nearest(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_nearest, wasm::f32_nearest) + } + + fn visit_f32_sqrt(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_sqrt, wasm::f32_sqrt) + } + + fn visit_f32_add(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_add, wasm::f32_add) + } + + fn visit_f32_sub(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_sub, wasm::f32_sub) + } + + fn visit_f32_mul(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_mul, wasm::f32_mul) + } + + fn visit_f32_div(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_div, wasm::f32_div) + } + + fn visit_f32_min(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_min, wasm::f32_min) + } + + fn visit_f32_max(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f32_max, wasm::f32_max) + } + + fn visit_f32_copysign(&mut self) -> Self::Output { + self.translate_fcopysign::( + Instruction::f32_copysign, + Instruction::f32_copysign_imm, + wasm::f32_copysign, + ) + } + + fn visit_f64_abs(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_abs, wasm::f64_abs) + } + + fn visit_f64_neg(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_neg, wasm::f64_neg) + } + + fn visit_f64_ceil(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_ceil, wasm::f64_ceil) + } + + fn visit_f64_floor(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_floor, wasm::f64_floor) + } + + fn visit_f64_trunc(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_trunc, wasm::f64_trunc) + } + + fn visit_f64_nearest(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_nearest, wasm::f64_nearest) + } + + fn visit_f64_sqrt(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_sqrt, wasm::f64_sqrt) + } + + fn visit_f64_add(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_add, wasm::f64_add) + } + + fn visit_f64_sub(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_sub, wasm::f64_sub) + } + + fn visit_f64_mul(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_mul, wasm::f64_mul) + } + + fn visit_f64_div(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_div, wasm::f64_div) + } + + fn visit_f64_min(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_min, wasm::f64_min) + } + + fn visit_f64_max(&mut self) -> Self::Output { + self.translate_fbinary(Instruction::f64_max, wasm::f64_max) + } + + fn visit_f64_copysign(&mut self) -> Self::Output { + self.translate_fcopysign::( + Instruction::f64_copysign, + Instruction::f64_copysign_imm, + wasm::f64_copysign, + ) + } + + fn visit_i32_wrap_i64(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_wrap_i64, wasm::i32_wrap_i64) + } + + fn visit_i32_trunc_f32_s(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i32_trunc_f32_s, wasm::i32_trunc_f32_s) + } + + fn visit_i32_trunc_f32_u(&mut self) -> Self::Output { + 
self.translate_unary_fallible(Instruction::i32_trunc_f32_u, wasm::i32_trunc_f32_u) + } + + fn visit_i32_trunc_f64_s(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i32_trunc_f64_s, wasm::i32_trunc_f64_s) + } + + fn visit_i32_trunc_f64_u(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i32_trunc_f64_u, wasm::i32_trunc_f64_u) + } + + fn visit_i64_extend_i32_s(&mut self) -> Self::Output { + self.translate_unary::(Instruction::i64_extend32_s, wasm::i64_extend_i32_s) + } + + fn visit_i64_extend_i32_u(&mut self) -> Self::Output { + self.translate_reinterpret(wasm::i64_extend_i32_u) + } + + fn visit_i64_trunc_f32_s(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i64_trunc_f32_s, wasm::i64_trunc_f32_s) + } + + fn visit_i64_trunc_f32_u(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i64_trunc_f32_u, wasm::i64_trunc_f32_u) + } + + fn visit_i64_trunc_f64_s(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i64_trunc_f64_s, wasm::i64_trunc_f64_s) + } + + fn visit_i64_trunc_f64_u(&mut self) -> Self::Output { + self.translate_unary_fallible(Instruction::i64_trunc_f64_u, wasm::i64_trunc_f64_u) + } + + fn visit_f32_convert_i32_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_convert_i32_s, wasm::f32_convert_i32_s) + } + + fn visit_f32_convert_i32_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_convert_i32_u, wasm::f32_convert_i32_u) + } + + fn visit_f32_convert_i64_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_convert_i64_s, wasm::f32_convert_i64_s) + } + + fn visit_f32_convert_i64_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_convert_i64_u, wasm::f32_convert_i64_u) + } + + fn visit_f32_demote_f64(&mut self) -> Self::Output { + self.translate_unary(Instruction::f32_demote_f64, wasm::f32_demote_f64) + } + + fn visit_f64_convert_i32_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_convert_i32_s, wasm::f64_convert_i32_s) + } + + fn visit_f64_convert_i32_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_convert_i32_u, wasm::f64_convert_i32_u) + } + + fn visit_f64_convert_i64_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_convert_i64_s, wasm::f64_convert_i64_s) + } + + fn visit_f64_convert_i64_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_convert_i64_u, wasm::f64_convert_i64_u) + } + + fn visit_f64_promote_f32(&mut self) -> Self::Output { + self.translate_unary(Instruction::f64_promote_f32, wasm::f64_promote_f32) + } + + fn visit_i32_reinterpret_f32(&mut self) -> Self::Output { + self.translate_reinterpret(wasm::i32_reinterpret_f32) + } + + fn visit_i64_reinterpret_f64(&mut self) -> Self::Output { + self.translate_reinterpret(wasm::i64_reinterpret_f64) + } + + fn visit_f32_reinterpret_i32(&mut self) -> Self::Output { + self.translate_reinterpret(wasm::f32_reinterpret_i32) + } + + fn visit_f64_reinterpret_i64(&mut self) -> Self::Output { + self.translate_reinterpret(wasm::f64_reinterpret_i64) + } + + fn visit_i32_extend8_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_extend8_s, wasm::i32_extend8_s) + } + + fn visit_i32_extend16_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_extend16_s, wasm::i32_extend16_s) + } + + fn visit_i64_extend8_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_extend8_s, wasm::i64_extend8_s) + } + + fn 
visit_i64_extend16_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_extend16_s, wasm::i64_extend16_s) + } + + fn visit_i64_extend32_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_extend32_s, wasm::i64_extend32_s) + } + + fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_trunc_sat_f32_s, wasm::i32_trunc_sat_f32_s) + } + + fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_trunc_sat_f32_u, wasm::i32_trunc_sat_f32_u) + } + + fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_trunc_sat_f64_s, wasm::i32_trunc_sat_f64_s) + } + + fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::i32_trunc_sat_f64_u, wasm::i32_trunc_sat_f64_u) + } + + fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_trunc_sat_f32_s, wasm::i64_trunc_sat_f32_s) + } + + fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_trunc_sat_f32_u, wasm::i64_trunc_sat_f32_u) + } + + fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_trunc_sat_f64_s, wasm::i64_trunc_sat_f64_s) + } + + fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output { + self.translate_unary(Instruction::i64_trunc_sat_f64_u, wasm::i64_trunc_sat_f64_u) + } + + fn visit_memory_init(&mut self, data_index: u32, mem: u32) -> Self::Output { + bail_unreachable!(self); + let (dst, src, len) = self.stack.pop3(); + let dst = self.layout.operand_to_reg(dst)?; + let src = self.layout.operand_to_reg(src)?; + let len = self.make_input16::(len)?; + let instr = match len { + Input::Immediate(len) => Instruction::memory_init_imm(dst, src, len), + Input::Reg(len) => Instruction::memory_init(dst, src, len), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::memory_index(mem))?; + self.push_param(Instruction::data_index(data_index))?; + Ok(()) + } + + fn visit_data_drop(&mut self, data_index: u32) -> Self::Output { + bail_unreachable!(self); + self.push_instr( + Instruction::data_drop(data_index), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_memory_copy(&mut self, dst_mem: u32, src_mem: u32) -> Self::Output { + bail_unreachable!(self); + let (dst, src, len) = self.stack.pop3(); + let dst_memory_type = *self.module.get_type_of_memory(MemoryIdx::from(dst_mem)); + let src_memory_type = *self.module.get_type_of_memory(MemoryIdx::from(src_mem)); + let min_index_ty = dst_memory_type.index_ty().min(&src_memory_type.index_ty()); + let dst = self.layout.operand_to_reg(dst)?; + let src = self.layout.operand_to_reg(src)?; + let len = self.make_index16(len, min_index_ty)?; + let instr = match len { + Input::Reg(len) => Instruction::memory_copy(dst, src, len), + Input::Immediate(len) => Instruction::memory_copy_imm(dst, src, len), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::memory_index(dst_mem))?; + self.push_param(Instruction::memory_index(src_mem))?; + Ok(()) + } + + fn visit_memory_fill(&mut self, mem: u32) -> Self::Output { + bail_unreachable!(self); + let memory_type = *self.module.get_type_of_memory(MemoryIdx::from(mem)); + let (dst, value, len) = self.stack.pop3(); + let dst = self.layout.operand_to_reg(dst)?; + let value = self.make_input(value, |_, value| { + let byte = u32::from(value) as u8; + Ok(Input::Immediate(byte)) + 
})?; + let len = self.make_index16(len, memory_type.index_ty())?; + let instr: Instruction = match (value, len) { + (Input::Reg(value), Input::Reg(len)) => Instruction::memory_fill(dst, value, len), + (Input::Reg(value), Input::Immediate(len)) => { + Instruction::memory_fill_exact(dst, value, len) + } + (Input::Immediate(value), Input::Reg(len)) => { + Instruction::memory_fill_imm(dst, value, len) + } + (Input::Immediate(value), Input::Immediate(len)) => { + Instruction::memory_fill_imm_exact(dst, value, len) + } + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::memory_index(mem))?; + Ok(()) + } + + fn visit_table_init(&mut self, elem_index: u32, table: u32) -> Self::Output { + bail_unreachable!(self); + let (dst, src, len) = self.stack.pop3(); + let dst = self.layout.operand_to_reg(dst)?; + let src = self.layout.operand_to_reg(src)?; + let len = self.make_input16::(len)?; + let instr = match len { + Input::Reg(len) => Instruction::table_init(dst, src, len), + Input::Immediate(len) => Instruction::table_init_imm(dst, src, len), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::table_index(table))?; + self.push_param(Instruction::elem_index(elem_index))?; + Ok(()) + } + + fn visit_elem_drop(&mut self, elem_index: u32) -> Self::Output { + bail_unreachable!(self); + self.push_instr( + Instruction::elem_drop(elem_index), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output { + bail_unreachable!(self); + let (dst, src, len) = self.stack.pop3(); + let dst_table_type = *self.module.get_type_of_table(TableIdx::from(dst_table)); + let src_table_type = *self.module.get_type_of_table(TableIdx::from(src_table)); + let min_index_ty = dst_table_type.index_ty().min(&src_table_type.index_ty()); + let dst = self.layout.operand_to_reg(dst)?; + let src = self.layout.operand_to_reg(src)?; + let len = self.make_index16(len, min_index_ty)?; + let instr = match len { + Input::Reg(len) => Instruction::table_copy(dst, src, len), + Input::Immediate(len) => Instruction::table_copy_imm(dst, src, len), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::table_index(dst_table))?; + self.push_param(Instruction::table_index(src_table))?; + Ok(()) + } + + fn visit_typed_select(&mut self, ty: wasmparser::ValType) -> Self::Output { + let type_hint = WasmiValueType::from(ty).into_inner(); + self.translate_select(Some(type_hint)) + } + + fn visit_ref_null(&mut self, ty: wasmparser::HeapType) -> Self::Output { + bail_unreachable!(self); + let type_hint = WasmiValueType::from(ty).into_inner(); + let null = match type_hint { + ValType::FuncRef => TypedVal::from(FuncRef::null()), + ValType::ExternRef => TypedVal::from(ExternRef::null()), + ty => panic!("expected a Wasm `reftype` but found: {ty:?}"), + }; + self.stack.push_immediate(null)?; + Ok(()) + } + + fn visit_ref_is_null(&mut self) -> Self::Output { + bail_unreachable!(self); + match self.stack.pop() { + Operand::Local(input) => { + // Note: `funcref` and `externref` both serialize to `UntypedValue` + // as `u64` so we can use `i64.eqz` translation for `ref.is_null` + // via reinterpretation of the value's type. 
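+ // Illustrative example: `(ref.is_null (local.get $r))` copies the local
+ // into a temporary register typed as `i64` and then reuses the `i64.eqz`
+ // translation on that temporary.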
+ let input = self.layout.local_to_reg(input.local_index())?; + // TODO: improve performance by allowing type overwrites for local operands + self.push_instr_with_result( + ValType::I64, + |result| Instruction::copy(result, input), + FuelCostsProvider::base, + )?; + self.visit_i64_eqz() + } + Operand::Temp(input) => { + // Note: `funcref` and `externref` both serialize to `UntypedValue` + // as `u64` so we can use `i64.eqz` translation for `ref.is_null` + // via reinterpretation of the value's type. + self.stack.push_temp(ValType::I64, input.instr())?; + self.visit_i64_eqz() + } + Operand::Immediate(input) => { + let untyped = input.val().untyped(); + let is_null = match input.ty() { + ValType::FuncRef => FuncRef::from(untyped).is_null(), + ValType::ExternRef => ExternRef::from(untyped).is_null(), + invalid => panic!("`ref.is_null`: encountered invalid input type: {invalid:?}"), + }; + self.stack.push_immediate(i32::from(is_null))?; + Ok(()) + } + } + } + + fn visit_ref_func(&mut self, function_index: u32) -> Self::Output { + bail_unreachable!(self); + self.push_instr_with_result( + ValType::FuncRef, + |result| Instruction::ref_func(result, function_index), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_table_fill(&mut self, table: u32) -> Self::Output { + bail_unreachable!(self); + let (dst, value, len) = self.stack.pop3(); + let table_type = *self.module.get_type_of_table(TableIdx::from(table)); + let dst = self.layout.operand_to_reg(dst)?; + let value = self.layout.operand_to_reg(value)?; + let len = self.make_index16(len, table_type.index_ty())?; + let instr = match len { + Input::Reg(len) => Instruction::table_fill(dst, len, value), + Input::Immediate(len) => Instruction::table_fill_imm(dst, len, value), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::table_index(table))?; + Ok(()) + } + + fn visit_table_get(&mut self, table: u32) -> Self::Output { + bail_unreachable!(self); + let table_type = *self.module.get_type_of_table(TableIdx::from(table)); + let index = self.stack.pop(); + let item_ty = table_type.element(); + let index_ty = table_type.index_ty(); + let index = self.make_index32(index, index_ty)?; + self.push_instr_with_result( + item_ty, + |result| match index { + Input::Reg(index) => Instruction::table_get(result, index), + Input::Immediate(index) => Instruction::table_get_imm(result, index), + }, + FuelCostsProvider::instance, + )?; + self.push_param(Instruction::table_index(table))?; + Ok(()) + } + + fn visit_table_set(&mut self, table: u32) -> Self::Output { + bail_unreachable!(self); + let table_type = *self.module.get_type_of_table(TableIdx::from(table)); + let index_ty = table_type.index_ty(); + let (index, value) = self.stack.pop2(); + let index = self.make_index32(index, index_ty)?; + let value = self.layout.operand_to_reg(value)?; + let instr = match index { + Input::Reg(index) => Instruction::table_set(index, value), + Input::Immediate(index) => Instruction::table_set_at(value, index), + }; + self.push_instr(instr, FuelCostsProvider::instance)?; + self.push_param(Instruction::table_index(table))?; + Ok(()) + } + + fn visit_table_grow(&mut self, table: u32) -> Self::Output { + bail_unreachable!(self); + let table_type = *self.module.get_type_of_table(TableIdx::from(table)); + let index_ty = table_type.index_ty(); + let (value, delta) = self.stack.pop2(); + let delta = self.make_index16(delta, index_ty)?; + if let Input::Immediate(delta) = delta { + if u64::from(delta) == 0 { + // Case: growing by 0 
elements. + // + // Since `table.grow` returns the `table.size` before the + // operation a `table.grow` with `delta` of 0 can be translated + // as `table.size` instruction instead. + self.push_instr_with_result( + index_ty.ty(), + |result| Instruction::table_size(result, table), + FuelCostsProvider::instance, + )?; + return Ok(()); + } + } + let value = self.layout.operand_to_reg(value)?; + self.push_instr_with_result( + index_ty.ty(), + |result| match delta { + Input::Reg(delta) => Instruction::table_grow(result, delta, value), + Input::Immediate(delta) => Instruction::table_grow_imm(result, delta, value), + }, + FuelCostsProvider::instance, + )?; + self.push_param(Instruction::table_index(table))?; + Ok(()) + } + + fn visit_table_size(&mut self, table: u32) -> Self::Output { + bail_unreachable!(self); + let table_type = *self.module.get_type_of_table(TableIdx::from(table)); + let index_ty = table_type.index_ty(); + self.push_instr_with_result( + index_ty.ty(), + |result| Instruction::table_size(result, table), + FuelCostsProvider::instance, + )?; + Ok(()) + } + + fn visit_return_call(&mut self, function_index: u32) -> Self::Output { + bail_unreachable!(self); + let func_idx = FuncIdx::from(function_index); + let func_type = self.resolve_func_type(func_idx); + let len_params = usize::from(func_type.len_params()); + let instr = match self.module.get_engine_func(func_idx) { + Some(engine_func) => { + // Case: We are calling an internal function and can optimize + // this case by using the special instruction for it. + match len_params { + 0 => Instruction::return_call_internal_0(engine_func), + _ => Instruction::return_call_internal(engine_func), + } + } + None => { + // Case: We are calling an imported function and must use the + // general calling operator for it. + match len_params { + 0 => Instruction::return_call_imported_0(function_index), + _ => Instruction::return_call_imported(function_index), + } + } + }; + self.push_instr(instr, FuelCostsProvider::call)?; + self.stack.pop_n(len_params, &mut self.operands); + self.instrs + .encode_register_list(&self.operands, &mut self.layout)?; + self.reachable = false; + Ok(()) + } + + fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output { + bail_unreachable!(self); + let func_type = self.resolve_type(type_index); + let index = self.stack.pop(); + let indirect_params = self.call_indirect_params(index, table_index)?; + let len_params = usize::from(func_type.len_params()); + let instr = match (len_params, indirect_params) { + (0, Instruction::CallIndirectParams { .. }) => { + Instruction::return_call_indirect_0(type_index) + } + (0, Instruction::CallIndirectParamsImm16 { .. }) => { + Instruction::return_call_indirect_0_imm16(type_index) + } + (_, Instruction::CallIndirectParams { .. }) => { + Instruction::return_call_indirect(type_index) + } + (_, Instruction::CallIndirectParamsImm16 { .. 
}) => { + Instruction::return_call_indirect_imm16(type_index) + } + _ => unreachable!(), + }; + self.push_instr(instr, FuelCostsProvider::call)?; + self.push_param(indirect_params)?; + self.stack.pop_n(len_params, &mut self.operands); + self.instrs + .encode_register_list(&self.operands, &mut self.layout)?; + self.reachable = false; + Ok(()) + } + + fn visit_i64_add128(&mut self) -> Self::Output { + self.translate_i64_binop128(Instruction::i64_add128, wasm::i64_add128) + } + + fn visit_i64_sub128(&mut self) -> Self::Output { + self.translate_i64_binop128(Instruction::i64_sub128, wasm::i64_sub128) + } + + fn visit_i64_mul_wide_s(&mut self) -> Self::Output { + self.translate_i64_mul_wide_sx(Instruction::i64_mul_wide_s, wasm::i64_mul_wide_s, true) + } + + fn visit_i64_mul_wide_u(&mut self) -> Self::Output { + self.translate_i64_mul_wide_sx(Instruction::i64_mul_wide_u, wasm::i64_mul_wide_u, false) + } +} diff --git a/crates/wasmi/src/engine/translator/mod.rs b/crates/wasmi/src/engine/translator/mod.rs index a3f04e4584..478e84f206 100644 --- a/crates/wasmi/src/engine/translator/mod.rs +++ b/crates/wasmi/src/engine/translator/mod.rs @@ -3,7 +3,10 @@ mod comparator; mod driver; mod error; +#[cfg_attr(feature = "experimental-translator", expect(dead_code))] mod func; +#[cfg(feature = "experimental-translator")] +mod func2; mod labels; mod relink_result; mod utils; @@ -15,11 +18,13 @@ mod tests; #[cfg(doc)] use crate::Engine; -pub use self::{ - driver::FuncTranslationDriver, - error::TranslationError, - func::{FuncTranslator, FuncTranslatorAllocations}, -}; +#[cfg(not(feature = "experimental-translator"))] +pub use self::func::{FuncTranslator, FuncTranslatorAllocations}; + +#[cfg(feature = "experimental-translator")] +pub use self::func2::{FuncTranslator, FuncTranslatorAllocations}; + +pub use self::{driver::FuncTranslationDriver, error::TranslationError}; use super::code_map::CompiledFuncEntity; use crate::{ engine::EngineFunc, diff --git a/crates/wasmi/src/engine/translator/utils.rs b/crates/wasmi/src/engine/translator/utils.rs index dfc33c4e74..4aed2967ce 100644 --- a/crates/wasmi/src/engine/translator/utils.rs +++ b/crates/wasmi/src/engine/translator/utils.rs @@ -59,6 +59,10 @@ pub trait WasmInteger: /// Returns `true` if `self` is equal to zero (0). fn is_zero(self) -> bool; + + /// Returns the wrapped negated `self`. + #[cfg(feature = "experimental-translator")] // TODO: remove + fn wrapping_neg(self) -> Self; } macro_rules! impl_wasm_integer { @@ -74,6 +78,11 @@ macro_rules! impl_wasm_integer { fn is_zero(self) -> bool { self == 0 } + + #[cfg(feature = "experimental-translator")] // TODO: remove + fn wrapping_neg(self) -> Self { + Self::wrapping_neg(self) + } } )* }; @@ -85,7 +94,7 @@ impl_wasm_integer!(i32, u32, i64, u64); /// # Note /// /// This trait provides some utility methods useful for translation. -pub trait WasmFloat: Copy + Into + From { +pub trait WasmFloat: Typed + Copy + Into + From { /// Returns `true` if `self` is any kind of NaN value. fn is_nan(self) -> bool; diff --git a/crates/wast/Cargo.toml b/crates/wast/Cargo.toml index 52b280f389..9c189ad104 100644 --- a/crates/wast/Cargo.toml +++ b/crates/wast/Cargo.toml @@ -17,3 +17,6 @@ exclude.workspace = true wasmi = { workspace = true, features = ["std", "simd"] } wast = { workspace = true, features = ["wasm-module"] } anyhow = "1.0" + +[features] +experimental-translator = ["wasmi/experimental-translator"]
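+# Note: this forwards the flag to `wasmi` so that this crate can also be
+# built and tested against the experimental `func2` translator.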