Merge branch 'master' into singlepass

# Conflicts:
#	lib/compiler-singlepass/src/codegen_x64.rs
#	lib/compiler-singlepass/src/common_decl.rs
#	lib/compiler-singlepass/src/emitter_x64.rs
#	lib/compiler-singlepass/src/lib.rs
#	lib/compiler-singlepass/src/machine.rs
#	lib/engine-jit/src/engine.rs
#	lib/engine-jit/src/serialize.rs
This commit is contained in:
Syrus
2020-05-18 15:14:44 -07:00
31 changed files with 917 additions and 435 deletions

View File

@@ -41,7 +41,7 @@ maintenance = { status = "actively-developed" }
default = ["wat", "cranelift", "jit"] default = ["wat", "cranelift", "jit"]
compiler = ["wasmer-engine-jit/compiler"] compiler = ["wasmer-engine-jit/compiler"]
engine = [] engine = []
jit = ["wasmer-engine-jit"] jit = ["wasmer-engine-jit", "engine"]
singlepass = [ singlepass = [
"wasmer-compiler-singlepass", "wasmer-compiler-singlepass",
"compiler", "compiler",

View File

@@ -7,11 +7,14 @@ use crate::RuntimeError;
use crate::{ExternType, FunctionType, GlobalType, MemoryType, TableType, ValType}; use crate::{ExternType, FunctionType, GlobalType, MemoryType, TableType, ValType};
use std::cmp::max; use std::cmp::max;
use std::slice; use std::slice;
use wasm_common::{HostFunction, Pages, ValueType, WasmTypeList, WithEnv, WithoutEnv}; use wasm_common::{
HostFunction, Pages, SignatureIndex, ValueType, WasmTypeList, WithEnv, WithoutEnv,
};
use wasmer_runtime::{ use wasmer_runtime::{
wasmer_call_trampoline, Export, ExportFunction, ExportGlobal, ExportMemory, ExportTable, wasmer_call_trampoline, Export, ExportFunction, ExportGlobal, ExportMemory, ExportTable,
LinearMemory, MemoryError, Table as RuntimeTable, VMCallerCheckedAnyfunc, VMContext, InstanceHandle, LinearMemory, MemoryError, Table as RuntimeTable, VMCallerCheckedAnyfunc,
VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMTrampoline, VMContext, VMDynamicFunctionImportContext, VMFunctionBody, VMFunctionKind, VMGlobalDefinition,
VMMemoryDefinition, VMTrampoline,
}; };
#[derive(Clone)] #[derive(Clone)]
@@ -477,32 +480,26 @@ impl Drop for Memory {
/// A function defined in the Wasm module /// A function defined in the Wasm module
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct WasmFunc { pub struct WasmFunctionDefinition {
// The trampoline to do the call // The trampoline to do the call
trampoline: VMTrampoline, trampoline: VMTrampoline,
} }
/// A function defined in the Host
#[derive(Clone, PartialEq)]
pub struct HostFunc {
// func: wasm_common::Func<Args, Rets>,
}
/// The inner helper /// The inner helper
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub enum InnerFunc { pub enum FunctionDefinition {
/// A function defined in the Wasm side /// A function defined in the Wasm side
Wasm(WasmFunc), Wasm(WasmFunctionDefinition),
/// A function defined in the Host side /// A function defined in the Host side
Host(HostFunc), Host,
} }
/// A WebAssembly `function`. /// A WebAssembly `function`.
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct Function { pub struct Function {
store: Store, store: Store,
definition: FunctionDefinition,
// If the Function is owned by the Store, not the instance // If the Function is owned by the Store, not the instance
inner: InnerFunc,
owned_by_store: bool, owned_by_store: bool,
exported: ExportFunction, exported: ExportFunction,
} }
@@ -519,21 +516,71 @@ impl Function {
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized, Env: Sized,
{ {
let func: wasm_common::Func<Args, Rets, Env> = wasm_common::Func::new(func); let func: wasm_common::Func<Args, Rets> = wasm_common::Func::new(func);
let address = func.address() as *const VMFunctionBody; let address = func.address() as *const VMFunctionBody;
let vmctx = (func.env().unwrap_or(std::ptr::null_mut()) as *mut _) as *mut VMContext; let vmctx = std::ptr::null_mut() as *mut _ as *mut VMContext;
let func_type = func.ty(); let func_type = func.ty();
let signature = store.engine().register_signature(&func_type); let signature = store.engine().register_signature(&func_type);
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: true, owned_by_store: true,
inner: InnerFunc::Host(HostFunc { definition: FunctionDefinition::Host,
// func
}),
exported: ExportFunction { exported: ExportFunction {
address, address,
vmctx, vmctx,
signature, signature,
kind: VMFunctionKind::Static,
},
}
}
#[allow(clippy::cast_ptr_alignment)]
pub fn new_dynamic<F>(store: &Store, ty: &FunctionType, func: F) -> Self
where
F: Fn(&[Val]) -> Result<Vec<Val>, RuntimeError> + 'static,
{
let dynamic_ctx =
VMDynamicFunctionImportContext::from_context(VMDynamicFunctionWithoutEnv {
func: Box::new(func),
});
let address = std::ptr::null() as *const () as *const VMFunctionBody;
let vmctx = Box::leak(Box::new(dynamic_ctx)) as *mut _ as *mut VMContext;
let signature = store.engine().register_signature(&ty);
Self {
store: store.clone(),
owned_by_store: true,
definition: FunctionDefinition::Host,
exported: ExportFunction {
address,
kind: VMFunctionKind::Dynamic,
vmctx,
signature,
},
}
}
#[allow(clippy::cast_ptr_alignment)]
pub fn new_dynamic_env<F, Env>(store: &Store, ty: &FunctionType, env: &mut Env, func: F) -> Self
where
F: Fn(&mut Env, &[Val]) -> Result<Vec<Val>, RuntimeError> + 'static,
Env: Sized,
{
let dynamic_ctx = VMDynamicFunctionImportContext::from_context(VMDynamicFunctionWithEnv {
env,
func: Box::new(func),
});
let address = std::ptr::null() as *const () as *const VMFunctionBody;
let vmctx = Box::leak(Box::new(dynamic_ctx)) as *mut _ as *mut VMContext;
let signature = store.engine().register_signature(&ty);
Self {
store: store.clone(),
owned_by_store: true,
definition: FunctionDefinition::Host,
exported: ExportFunction {
address,
kind: VMFunctionKind::Dynamic,
vmctx,
signature,
}, },
} }
} }
@@ -550,19 +597,23 @@ impl Function {
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized, Env: Sized,
{ {
let func: wasm_common::Func<Args, Rets, Env> = wasm_common::Func::new_env(env, func); let func: wasm_common::Func<Args, Rets> = wasm_common::Func::new(func);
let address = func.address() as *const VMFunctionBody; let address = func.address() as *const VMFunctionBody;
let vmctx = (func.env().unwrap_or(std::ptr::null_mut()) as *mut _) as *mut VMContext; // TODO: We need to refactor the Function context.
// Right now is structured as it's always a `VMContext`. However, only
// Wasm-defined functions have a `VMContext`.
// In the case of Host-defined functions `VMContext` is whatever environment
// the user want to attach to the function.
let vmctx = env as *mut _ as *mut VMContext;
let func_type = func.ty(); let func_type = func.ty();
let signature = store.engine().register_signature(&func_type); let signature = store.engine().register_signature(&func_type);
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: true, owned_by_store: true,
inner: InnerFunc::Host(HostFunc { definition: FunctionDefinition::Host,
// func
}),
exported: ExportFunction { exported: ExportFunction {
address, address,
kind: VMFunctionKind::Static,
vmctx, vmctx,
signature, signature,
}, },
@@ -584,7 +635,7 @@ impl Function {
fn call_wasm( fn call_wasm(
&self, &self,
func: &WasmFunc, func: &WasmFunctionDefinition,
params: &[Val], params: &[Val],
results: &mut [Val], results: &mut [Val],
) -> Result<(), RuntimeError> { ) -> Result<(), RuntimeError> {
@@ -675,8 +726,8 @@ impl Function {
/// call the trampoline. /// call the trampoline.
pub fn call(&self, params: &[Val]) -> Result<Box<[Val]>, RuntimeError> { pub fn call(&self, params: &[Val]) -> Result<Box<[Val]>, RuntimeError> {
let mut results = vec![Val::null(); self.result_arity()]; let mut results = vec![Val::null(); self.result_arity()];
match &self.inner { match &self.definition {
InnerFunc::Wasm(wasm) => { FunctionDefinition::Wasm(wasm) => {
self.call_wasm(&wasm, params, &mut results)?; self.call_wasm(&wasm, params, &mut results)?;
} }
_ => {} // _ => unimplemented!("The host is unimplemented"), _ => {} // _ => unimplemented!("The host is unimplemented"),
@@ -685,11 +736,14 @@ impl Function {
} }
pub(crate) fn from_export(store: &Store, wasmer_export: ExportFunction) -> Self { pub(crate) fn from_export(store: &Store, wasmer_export: ExportFunction) -> Self {
let trampoline = store.engine().trampoline(wasmer_export.signature).unwrap(); let trampoline = store
.engine()
.function_call_trampoline(wasmer_export.signature)
.unwrap();
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: false, owned_by_store: false,
inner: InnerFunc::Wasm(WasmFunc { trampoline }), definition: FunctionDefinition::Wasm(WasmFunctionDefinition { trampoline }),
exported: wasmer_export, exported: wasmer_export,
} }
} }
@@ -720,3 +774,113 @@ impl std::fmt::Debug for Function {
Ok(()) Ok(())
} }
} }
/// This trait is one that all dynamic functions must fulfill.
trait VMDynamicFunction {
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError>;
}
struct VMDynamicFunctionWithoutEnv {
func: Box<dyn Fn(&[Val]) -> Result<Vec<Val>, RuntimeError> + 'static>,
}
impl VMDynamicFunction for VMDynamicFunctionWithoutEnv {
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError> {
(*self.func)(&args)
}
}
struct VMDynamicFunctionWithEnv<Env>
where
Env: Sized,
{
func: Box<dyn Fn(&mut Env, &[Val]) -> Result<Vec<Val>, RuntimeError> + 'static>,
env: *mut Env,
}
impl<Env> VMDynamicFunction for VMDynamicFunctionWithEnv<Env>
where
Env: Sized,
{
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError> {
unsafe { (*self.func)(&mut *self.env, &args) }
}
}
trait VMDynamicFunctionImportCall<T: VMDynamicFunction> {
fn from_context(ctx: T) -> Self;
fn address_ptr() -> *const VMFunctionBody;
unsafe fn func_wrapper(
&self,
caller_vmctx: *mut VMContext,
sig_index: SignatureIndex,
values_vec: *mut i128,
);
}
impl<T: VMDynamicFunction> VMDynamicFunctionImportCall<T> for VMDynamicFunctionImportContext<T> {
fn from_context(ctx: T) -> Self {
Self {
address: Self::address_ptr(),
ctx,
}
}
fn address_ptr() -> *const VMFunctionBody {
Self::func_wrapper as *const () as *const VMFunctionBody
}
// This function wraps our func, to make it compatible with the
// reverse trampoline signature
unsafe fn func_wrapper(
// Note: we use the trick that the first param to this function is the `VMDynamicFunctionImportContext`
// itself, so rather than doing `dynamic_ctx: &VMDynamicFunctionImportContext<T>`, we simplify it a bit
&self,
caller_vmctx: *mut VMContext,
sig_index: SignatureIndex,
values_vec: *mut i128,
) {
use std::panic::{self, AssertUnwindSafe};
let result = panic::catch_unwind(AssertUnwindSafe(|| {
// This is actually safe, since right now the function signature
// receives two contexts:
// 1. `vmctx`: the context associated to where the function is defined.
// It will be `VMContext` in case is defined in Wasm, and a custom
// `Env` in case is host defined.
// 2. `caller_vmctx`: the context associated to whoever is calling that function.
//
// Because this code will only be reached when calling from wasm to host, we
// can assure the callee_vmctx is indeed a VMContext, and hence is completely
// safe to get a handle from it.
let handle = InstanceHandle::from_vmctx(caller_vmctx);
let module = handle.module_ref();
let func_ty = &module.signatures[sig_index];
let mut args = Vec::with_capacity(func_ty.params().len());
for (i, ty) in func_ty.params().iter().enumerate() {
args.push(Val::read_value_from(values_vec.add(i), *ty));
}
let returns = self.ctx.call(&args)?;
// We need to dynamically check that the returns
// match the expected types, as well as expected length.
let return_types = returns.iter().map(|ret| ret.ty()).collect::<Vec<_>>();
if return_types != func_ty.results() {
return Err(RuntimeError::new(format!(
"Dynamic function returned wrong signature. Expected {:?} but got {:?}",
func_ty.results(),
return_types
)));
}
for (i, ret) in returns.iter().enumerate() {
ret.write_value_to(values_vec.add(i));
}
Ok(())
}));
match result {
Ok(Ok(())) => {}
Ok(Err(trap)) => wasmer_runtime::raise_user_trap(Box::new(trap)),
Err(panic) => wasmer_runtime::resume_panic(panic),
}
}
}

View File

@@ -272,7 +272,7 @@ mod test {
// create a memory // create a memory
let store = Store::default(); let store = Store::default();
let memory_descriptor = MemoryType::new(1, Some(1), false); let memory_descriptor = MemoryType::new(1, Some(1), false);
let memory = Memory::new(&store, memory_descriptor); let memory = Memory::new(&store, memory_descriptor).unwrap();
// test that basic access works and that len = 0 works, but oob does not // test that basic access works and that len = 0 works, but oob does not
let start_wasm_ptr: WasmPtr<u8> = WasmPtr::new(0); let start_wasm_ptr: WasmPtr<u8> = WasmPtr::new(0);

View File

@@ -64,6 +64,9 @@ impl ValAnyFunc for Val {
let export = wasmer_runtime::ExportFunction { let export = wasmer_runtime::ExportFunction {
address: item.func_ptr, address: item.func_ptr,
signature: item.type_index, signature: item.type_index,
// All functions in tables are already Static (as dynamic functions
// are converted to use the trampolines with static signatures).
kind: wasmer_runtime::VMFunctionKind::Static,
vmctx: item.vmctx, vmctx: item.vmctx,
}; };
let f = Function::from_export(store, export); let f = Function::from_export(store, export);

View File

@@ -4,7 +4,9 @@ use crate::address_map::get_function_address_map;
use crate::config::CraneliftConfig; use crate::config::CraneliftConfig;
use crate::func_environ::{get_func_name, FuncEnvironment}; use crate::func_environ::{get_func_name, FuncEnvironment};
use crate::sink::{RelocSink, TrapSink}; use crate::sink::{RelocSink, TrapSink};
use crate::trampoline::{make_wasm_trampoline, FunctionBuilderContext}; use crate::trampoline::{
make_trampoline_dynamic_function, make_trampoline_function_call, FunctionBuilderContext,
};
use crate::translator::{ use crate::translator::{
compiled_function_unwind_info, signature_to_cranelift_ir, transform_jump_table, FuncTranslator, compiled_function_unwind_info, signature_to_cranelift_ir, transform_jump_table, FuncTranslator,
}; };
@@ -14,7 +16,8 @@ use cranelift_codegen::{binemit, isa, Context};
use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{ use wasm_common::{
Features, FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex, TableIndex, Features, FunctionIndex, FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex,
TableIndex,
}; };
use wasmer_compiler::CompileError; use wasmer_compiler::CompileError;
use wasmer_compiler::{ use wasmer_compiler::{
@@ -155,15 +158,37 @@ impl Compiler for CraneliftCompiler {
Ok(Compilation::new(functions, custom_sections)) Ok(Compilation::new(functions, custom_sections))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {
signatures signatures
.par_iter() .par_iter()
.map_init(FunctionBuilderContext::new, |mut cx, sig| { .map_init(FunctionBuilderContext::new, |mut cx, sig| {
make_wasm_trampoline(&*self.isa, &mut cx, sig, std::mem::size_of::<u128>()) make_trampoline_function_call(&*self.isa, &mut cx, sig)
}) })
.collect::<Result<Vec<_>, CompileError>>() .collect::<Result<Vec<_>, CompileError>>()
} }
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError> {
use wasmer_runtime::VMOffsets;
let isa = self.isa();
let frontend_config = isa.frontend_config();
let offsets = VMOffsets::new(frontend_config.pointer_bytes(), module);
Ok(module
.functions
.values()
.take(module.num_imported_funcs)
.collect::<Vec<_>>()
.par_iter()
.map_init(FunctionBuilderContext::new, |mut cx, sig_index| {
make_trampoline_dynamic_function(&*self.isa, &module, &offsets, &mut cx, &sig_index)
})
.collect::<Result<Vec<_>, CompileError>>()?
.into_iter()
.collect::<PrimaryMap<FunctionIndex, FunctionBody>>())
}
} }

View File

@@ -58,7 +58,7 @@ pub use crate::compiler::CraneliftCompiler;
pub use crate::config::CraneliftConfig; pub use crate::config::CraneliftConfig;
pub use crate::debug::{FrameLayout, FrameLayoutChange, FrameLayouts}; pub use crate::debug::{FrameLayout, FrameLayoutChange, FrameLayouts};
pub use crate::debug::{ModuleMemoryOffset, ModuleVmctxInfo, ValueLabelsRanges}; pub use crate::debug::{ModuleMemoryOffset, ModuleVmctxInfo, ValueLabelsRanges};
pub use crate::trampoline::make_wasm_trampoline; pub use crate::trampoline::make_trampoline_function_call;
/// Version number of this crate. /// Version number of this crate.
pub const VERSION: &str = env!("CARGO_PKG_VERSION"); pub const VERSION: &str = env!("CARGO_PKG_VERSION");

View File

@@ -0,0 +1,146 @@
//! A trampoline generator for calling dynamic host functions from Wasm.
use super::binemit::TrampolineRelocSink;
use crate::translator::{compiled_function_unwind_info, signature_to_cranelift_ir};
use cranelift_codegen::ir::{
types, ExternalName, Function, InstBuilder, MemFlags, StackSlotData, StackSlotKind,
};
use cranelift_codegen::isa::TargetIsa;
use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir};
use std::cmp;
use std::mem;
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use wasm_common::entity::EntityRef;
use wasm_common::SignatureIndex;
use wasmer_compiler::{CompileError, FunctionBody};
use wasmer_runtime::{Module, VMOffsets};
/// Create a trampoline for invoking a WebAssembly function.
pub fn make_trampoline_dynamic_function(
isa: &dyn TargetIsa,
module: &Module,
offsets: &VMOffsets,
fn_builder_ctx: &mut FunctionBuilderContext,
sig_index: &SignatureIndex,
) -> Result<FunctionBody, CompileError> {
let func_type = &module.signatures[*sig_index];
let pointer_type = isa.pointer_type();
let frontend_config = isa.frontend_config();
let signature = signature_to_cranelift_ir(func_type, &frontend_config);
let mut stub_sig = ir::Signature::new(frontend_config.default_call_conv);
// Add the caller `vmctx` parameter.
stub_sig.params.push(ir::AbiParam::special(
pointer_type,
ir::ArgumentPurpose::VMContext,
));
// Add the caller/callee `vmctx` parameter.
stub_sig.params.push(ir::AbiParam::new(pointer_type));
// Add the `sig_index` parameter.
stub_sig.params.push(ir::AbiParam::new(types::I32));
// Add the `values_vec` parameter.
stub_sig.params.push(ir::AbiParam::new(pointer_type));
// Compute the size of the values vector. The vmctx and caller vmctx are passed separately.
let value_size = mem::size_of::<u128>();
let values_vec_len =
(value_size * cmp::max(signature.params.len() - 2, signature.returns.len())) as u32;
let mut context = Context::new();
context.func = Function::with_name_signature(ExternalName::user(0, 0), signature.clone());
let ss = context.func.create_stack_slot(StackSlotData::new(
StackSlotKind::ExplicitSlot,
values_vec_len,
));
{
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_block();
builder.append_block_params_for_function_params(block0);
builder.switch_to_block(block0);
builder.seal_block(block0);
let values_vec_ptr_val = builder.ins().stack_addr(pointer_type, ss, 0);
let mflags = MemFlags::trusted();
// We only get the non-vmctx arguments
for i in 2..signature.params.len() {
let val = builder.func.dfg.block_params(block0)[i];
builder.ins().store(
mflags,
val,
values_vec_ptr_val,
((i - 2) * value_size) as i32,
);
}
let block_params = builder.func.dfg.block_params(block0);
let vmctx_ptr_val = block_params[0];
let caller_vmctx_ptr_val = block_params[1];
// Get the signature index
let caller_sig_id = builder.ins().iconst(types::I32, sig_index.index() as i64);
let callee_args = vec![
vmctx_ptr_val,
caller_vmctx_ptr_val,
caller_sig_id,
values_vec_ptr_val,
];
let new_sig = builder.import_signature(stub_sig);
let mem_flags = ir::MemFlags::trusted();
let callee_value = builder.ins().load(
pointer_type,
mem_flags,
vmctx_ptr_val,
offsets.vmdynamicfunction_import_context_address() as i32,
);
builder
.ins()
.call_indirect(new_sig, callee_value, &callee_args);
let mflags = MemFlags::trusted();
let mut results = Vec::new();
for (i, r) in signature.returns.iter().enumerate() {
let load = builder.ins().load(
r.value_type,
mflags,
values_vec_ptr_val,
(i * value_size) as i32,
);
results.push(load);
}
builder.ins().return_(&results);
builder.finalize()
}
let mut code_buf = Vec::new();
let mut reloc_sink = TrampolineRelocSink {};
let mut trap_sink = binemit::NullTrapSink {};
let mut stackmap_sink = binemit::NullStackmapSink {};
context
.compile_and_emit(
isa,
&mut code_buf,
&mut reloc_sink,
&mut trap_sink,
&mut stackmap_sink,
)
.map_err(|error| CompileError::Codegen(pretty_error(&context.func, Some(isa), error)))?;
let unwind_info = compiled_function_unwind_info(isa, &context);
Ok(FunctionBody {
body: code_buf,
unwind_info,
})
}

View File

@@ -15,15 +15,15 @@ use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::Context; use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir}; use cranelift_codegen::{binemit, ir};
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use std::mem;
use wasm_common::FunctionType; use wasm_common::FunctionType;
use wasmer_compiler::{CompileError, FunctionBody}; use wasmer_compiler::{CompileError, FunctionBody};
/// Create a trampoline for invoking a WebAssembly function. /// Create a trampoline for invoking a WebAssembly function.
pub fn make_wasm_trampoline( pub fn make_trampoline_function_call(
isa: &dyn TargetIsa, isa: &dyn TargetIsa,
fn_builder_ctx: &mut FunctionBuilderContext, fn_builder_ctx: &mut FunctionBuilderContext,
func_type: &FunctionType, func_type: &FunctionType,
value_size: usize,
) -> Result<FunctionBody, CompileError> { ) -> Result<FunctionBody, CompileError> {
let pointer_type = isa.pointer_type(); let pointer_type = isa.pointer_type();
let frontend_config = isa.frontend_config(); let frontend_config = isa.frontend_config();
@@ -49,6 +49,7 @@ pub fn make_wasm_trampoline(
context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig); context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig);
context.func.collect_frame_layout_info(); context.func.collect_frame_layout_info();
let value_size = mem::size_of::<u128>();
{ {
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx); let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_block(); let block0 = builder.create_block();

View File

@@ -1,10 +1,10 @@
#![allow(missing_docs)] #![allow(missing_docs)]
// mod host; mod dynamic_function;
mod wasm; mod function_call;
// pub use host::make_host_trampoline; pub use self::dynamic_function::make_trampoline_dynamic_function;
pub use self::wasm::make_wasm_trampoline; pub use self::function_call::make_trampoline_function_call;
// TODO: Delete // TODO: Delete
pub mod ir { pub mod ir {

View File

@@ -82,6 +82,7 @@ impl Compiler for LLVMCompiler {
.cloned() .cloned()
.unwrap_or_else(|| format!("fn{}", func_index.index())); .unwrap_or_else(|| format!("fn{}", func_index.index()));
} }
let mut module_custom_sections = PrimaryMap::new();
let mut functions = function_body_inputs let mut functions = function_body_inputs
.into_iter() .into_iter()
.collect::<Vec<(LocalFunctionIndex, &FunctionBodyData<'_>)>>() .collect::<Vec<(LocalFunctionIndex, &FunctionBodyData<'_>)>>()
@@ -101,46 +102,41 @@ impl Compiler for LLVMCompiler {
}) })
.collect::<Result<Vec<_>, CompileError>>()? .collect::<Result<Vec<_>, CompileError>>()?
.into_iter() .into_iter()
.map(|(mut function, local_relocations, custom_sections)| { .map(|(mut compiled_function, mut function_custom_sections)| {
/// We collect the sections data let first_section = module_custom_sections.len() as u32;
for (local_idx, custom_section) in custom_sections.iter().enumerate() { for (_, custom_section) in function_custom_sections.iter() {
let local_idx = local_idx as u32; // TODO: remove this call to clone()
// TODO: these section numbers are potentially wrong, if there's let mut custom_section = custom_section.clone();
// no Read and only a ReadExecute then ReadExecute is 0. for mut reloc in &mut custom_section.relocations {
let (ref mut section, section_num) = match &custom_section.protection { match reloc.reloc_target {
CustomSectionProtection::Read => { RelocationTarget::CustomSection(index) => {
(&mut readonly_section, SectionIndex::from_u32(0)) reloc.reloc_target = RelocationTarget::CustomSection(
SectionIndex::from_u32(first_section + index.as_u32()),
)
} }
}; _ => {}
let offset = section.bytes.len() as i64;
section.bytes.append(&custom_section.bytes);
// TODO: we're needlessly rescanning the whole list.
for local_relocation in &local_relocations {
if local_relocation.local_section_index == local_idx {
used_readonly_section = true;
function.relocations.push(Relocation {
kind: local_relocation.kind,
reloc_target: RelocationTarget::CustomSection(section_num),
offset: local_relocation.offset,
addend: local_relocation.addend + offset,
});
} }
} }
module_custom_sections.push(custom_section);
} }
Ok(function) for mut reloc in &mut compiled_function.relocations {
match reloc.reloc_target {
RelocationTarget::CustomSection(index) => {
reloc.reloc_target = RelocationTarget::CustomSection(
SectionIndex::from_u32(first_section + index.as_u32()),
)
}
_ => {}
}
}
compiled_function
}) })
.collect::<Result<Vec<_>, CompileError>>()?
.into_iter()
.collect::<PrimaryMap<LocalFunctionIndex, _>>(); .collect::<PrimaryMap<LocalFunctionIndex, _>>();
let mut custom_sections = PrimaryMap::new(); Ok(Compilation::new(functions, module_custom_sections))
if used_readonly_section {
custom_sections.push(readonly_section);
}
Ok(Compilation::new(functions, custom_sections))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {
@@ -151,4 +147,12 @@ impl Compiler for LLVMCompiler {
}) })
.collect::<Result<Vec<_>, CompileError>>() .collect::<Result<Vec<_>, CompileError>>()
} }
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError> {
Ok(PrimaryMap::new())
// unimplemented!("Dynamic function trampolines not yet implemented");
}
} }

View File

@@ -50,10 +50,22 @@ impl LLVMConfig {
// Override the default multi-value switch // Override the default multi-value switch
features.multi_value = false; features.multi_value = false;
let operating_system =
if target.triple().operating_system == wasmer_compiler::OperatingSystem::Darwin {
// LLVM detects static relocation + darwin + 64-bit and
// force-enables PIC because MachO doesn't support that
// combination. They don't check whether they're targeting
// MachO, they check whether the OS is set to Darwin.
//
// Since both linux and darwin use SysV ABI, this should work.
wasmer_compiler::OperatingSystem::Linux
} else {
target.triple().operating_system
};
let triple = Triple { let triple = Triple {
architecture: target.triple().architecture, architecture: target.triple().architecture,
vendor: target.triple().vendor.clone(), vendor: target.triple().vendor.clone(),
operating_system: target.triple().operating_system, operating_system,
environment: target.triple().environment, environment: target.triple().environment,
binary_format: target_lexicon::BinaryFormat::Elf, binary_format: target_lexicon::BinaryFormat::Elf,
}; };

View File

@@ -29,10 +29,12 @@ use inkwell::{
}; };
use smallvec::SmallVec; use smallvec::SmallVec;
use std::any::Any; use std::any::Any;
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::num::TryFromIntError;
use crate::config::LLVMConfig; use crate::config::LLVMConfig;
use wasm_common::entity::{EntityRef, PrimaryMap, SecondaryMap}; use wasm_common::entity::{PrimaryMap, SecondaryMap};
use wasm_common::{ use wasm_common::{
FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryType, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryType,
Mutability, SignatureIndex, TableIndex, Type, Mutability, SignatureIndex, TableIndex, Type,
@@ -40,9 +42,9 @@ use wasm_common::{
use wasmer_compiler::wasmparser::{self, BinaryReader, MemoryImmediate, Operator}; use wasmer_compiler::wasmparser::{self, BinaryReader, MemoryImmediate, Operator};
use wasmer_compiler::{ use wasmer_compiler::{
to_wasm_error, wasm_unsupported, Addend, CodeOffset, CompileError, CompiledFunction, to_wasm_error, wasm_unsupported, Addend, CodeOffset, CompileError, CompiledFunction,
CompiledFunctionFrameInfo, CustomSection, CustomSectionProtection, FunctionAddressMap, CompiledFunctionFrameInfo, CustomSection, CustomSectionProtection, CustomSections,
FunctionBody, FunctionBodyData, InstructionAddressMap, Relocation, RelocationKind, FunctionAddressMap, FunctionBody, FunctionBodyData, InstructionAddressMap, Relocation,
RelocationTarget, SectionBody, SourceLoc, WasmResult, RelocationKind, RelocationTarget, SectionBody, SectionIndex, SourceLoc, WasmResult,
}; };
use wasmer_runtime::libcalls::LibCall; use wasmer_runtime::libcalls::LibCall;
use wasmer_runtime::Module as WasmerCompilerModule; use wasmer_runtime::Module as WasmerCompilerModule;
@@ -52,6 +54,30 @@ use wasmer_runtime::{MemoryPlan, MemoryStyle, TablePlan, VMBuiltinFunctionIndex,
use std::fs; use std::fs;
use std::io::Write; use std::io::Write;
use wasm_common::entity::entity_impl;
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct ElfSectionIndex(u32);
entity_impl!(ElfSectionIndex);
impl ElfSectionIndex {
pub fn is_undef(&self) -> bool {
self.as_u32() == goblin::elf::section_header::SHN_UNDEF
}
pub fn from_usize(value: usize) -> Result<Self, CompileError> {
match u32::try_from(value) {
Err(_) => Err(CompileError::Codegen(format!(
"elf section index {} does not fit in 32 bits",
value
))),
Ok(value) => Ok(ElfSectionIndex::from_u32(value)),
}
}
pub fn as_usize(&self) -> usize {
self.as_u32() as usize
}
}
// TODO // TODO
fn wptype_to_type(ty: wasmparser::Type) -> WasmResult<Type> { fn wptype_to_type(ty: wasmparser::Type) -> WasmResult<Type> {
match ty { match ty {
@@ -84,15 +110,6 @@ fn const_zero<'ctx>(ty: BasicTypeEnum<'ctx>) -> BasicValueEnum<'ctx> {
} }
} }
// Relocation against a per-function section.
#[derive(Debug)]
pub struct LocalRelocation {
pub kind: RelocationKind,
pub local_section_index: u32,
pub offset: CodeOffset,
pub addend: Addend,
}
impl FuncTranslator { impl FuncTranslator {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@@ -109,7 +126,7 @@ impl FuncTranslator {
memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>, memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>,
table_plans: &PrimaryMap<TableIndex, TablePlan>, table_plans: &PrimaryMap<TableIndex, TablePlan>,
func_names: &SecondaryMap<FunctionIndex, String>, func_names: &SecondaryMap<FunctionIndex, String>,
) -> Result<(CompiledFunction, Vec<LocalRelocation>, Vec<CustomSection>), CompileError> { ) -> Result<(CompiledFunction, CustomSections), CompileError> {
let func_index = wasm_module.func_index(*local_func_index); let func_index = wasm_module.func_index(*local_func_index);
let func_name = &func_names[func_index]; let func_name = &func_names[func_index];
let module_name = match wasm_module.name.as_ref() { let module_name = match wasm_module.name.as_ref() {
@@ -135,7 +152,7 @@ impl FuncTranslator {
// TODO: figure out how many bytes long vmctx is, and mark it dereferenceable. (no need to mark it nonnull once we do this.) // TODO: figure out how many bytes long vmctx is, and mark it dereferenceable. (no need to mark it nonnull once we do this.)
// TODO: mark vmctx nofree // TODO: mark vmctx nofree
func.set_personality_function(intrinsics.personality); func.set_personality_function(intrinsics.personality);
func.as_global_value().set_section("wasmer_function"); func.as_global_value().set_section(".wasmer_function");
let entry = self.ctx.append_basic_block(func, "entry"); let entry = self.ctx.append_basic_block(func, "entry");
let start_of_code = self.ctx.append_basic_block(func, "start_of_code"); let start_of_code = self.ctx.append_basic_block(func, "start_of_code");
@@ -165,7 +182,9 @@ impl FuncTranslator {
for idx in 0..wasm_fn_type.params().len() { for idx in 0..wasm_fn_type.params().len() {
let ty = wasm_fn_type.params()[idx]; let ty = wasm_fn_type.params()[idx];
let ty = type_to_llvm(&intrinsics, ty); let ty = type_to_llvm(&intrinsics, ty);
let value = func.get_nth_param((idx + 2) as u32).unwrap(); let value = func
.get_nth_param((idx as u32).checked_add(2).unwrap())
.unwrap();
// TODO: don't interleave allocas and stores. // TODO: don't interleave allocas and stores.
let alloca = cache_builder.build_alloca(ty, "param"); let alloca = cache_builder.build_alloca(ty, "param");
cache_builder.build_store(alloca, value); cache_builder.build_store(alloca, value);
@@ -335,74 +354,118 @@ impl FuncTranslator {
Some(name.unwrap()) Some(name.unwrap())
}; };
let wasmer_function_idx = elf // Build up a mapping from a section to its relocation sections.
let reloc_sections = elf.shdr_relocs.iter().fold(
HashMap::new(),
|mut map: HashMap<_, Vec<_>>, (section_index, reloc_section)| {
let target_section = elf.section_headers[*section_index].sh_info as usize;
let target_section = ElfSectionIndex::from_usize(target_section).unwrap();
map.entry(target_section).or_default().push(reloc_section);
map
},
);
let mut visited: HashSet<ElfSectionIndex> = HashSet::new();
let mut worklist: Vec<ElfSectionIndex> = Vec::new();
let mut section_targets: HashMap<ElfSectionIndex, RelocationTarget> = HashMap::new();
let wasmer_function_index = elf
.section_headers .section_headers
.iter() .iter()
.enumerate() .enumerate()
.filter(|(_, section)| get_section_name(section) == Some("wasmer_function")) .filter(|(_, section)| get_section_name(section) == Some(".wasmer_function"))
.map(|(idx, _)| idx) .map(|(index, _)| index)
.take(1)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// TODO: handle errors here instead of asserting. if wasmer_function_index.len() != 1 {
assert!(wasmer_function_idx.len() == 1); return Err(CompileError::Codegen(format!(
let wasmer_function_idx = wasmer_function_idx[0]; "found {} sections named .wasmer_function",
wasmer_function_index.len()
let bytes = elf.section_headers[wasmer_function_idx].file_range(); )));
let bytes = mem_buf_slice[bytes.start..bytes.end].to_vec();
let mut relocations = vec![];
let mut local_relocations = vec![];
let mut required_custom_sections = HashMap::new();
for (section_index, reloc_section) in &elf.shdr_relocs {
let section_name = get_section_name(&elf.section_headers[*section_index]);
if section_name == Some(".rel.rodata") || section_name == Some(".rela.rodata") {
return Err(CompileError::Codegen(
"jump tables not yet implemented".to_string(),
));
} }
if section_name != Some(".relawasmer_function") let wasmer_function_index = wasmer_function_index[0];
&& section_name != Some(".relwasmer_function") let wasmer_function_index = ElfSectionIndex::from_usize(wasmer_function_index)?;
let mut section_to_custom_section = HashMap::new();
section_targets.insert(
wasmer_function_index,
RelocationTarget::LocalFunc(*local_func_index),
);
let mut next_custom_section: u32 = 0;
let mut elf_section_to_target = |elf_section_index: ElfSectionIndex| {
*section_targets.entry(elf_section_index).or_insert_with(|| {
let next = SectionIndex::from_u32(next_custom_section);
section_to_custom_section.insert(elf_section_index, next);
let target = RelocationTarget::CustomSection(next);
next_custom_section += 1;
target
})
};
let section_bytes = |elf_section_index: ElfSectionIndex| {
let elf_section_index = elf_section_index.as_usize();
let byte_range = elf.section_headers[elf_section_index].file_range();
mem_buf_slice[byte_range.start..byte_range.end].to_vec()
};
// From elf section index to list of Relocations. Although we use a Vec,
// the order of relocations is not important.
let mut relocations: HashMap<ElfSectionIndex, Vec<Relocation>> = HashMap::new();
// Each iteration of this loop pulls a section and the relocations
// relocations that apply to it. We begin with the ".wasmer_function"
// section, and then parse all relocation sections that apply to that
// section. Those relocations may refer to additional sections which we
// then add to the worklist until we've visited the closure of
// everything needed to run the code in ".wasmer_function".
//
// `worklist` is the list of sections we have yet to visit. It never
// contains any duplicates or sections we've already visited. `visited`
// contains all the sections we've ever added to the worklist in a set
// so that we can quickly check whether a section is new before adding
// it to worklist. `section_to_custom_section` is filled in with all
// the sections we want to include.
worklist.push(wasmer_function_index);
visited.insert(wasmer_function_index);
while let Some(section_index) = worklist.pop() {
for reloc in reloc_sections
.get(&section_index)
.iter()
.flat_map(|inner| inner.iter().flat_map(|inner2| inner2.iter()))
{ {
continue;
}
for reloc in reloc_section.iter() {
let kind = match reloc.r_type { let kind = match reloc.r_type {
// TODO: these constants are not per-arch, we'll need to // TODO: these constants are not per-arch, we'll need to
// make the whole match per-arch. // make the whole match per-arch.
goblin::elf::reloc::R_X86_64_64 => RelocationKind::Abs8, goblin::elf::reloc::R_X86_64_64 => RelocationKind::Abs8,
_ => unimplemented!("unknown relocation {}", reloc.r_type), _ => {
return Err(CompileError::Codegen(format!(
"unknown ELF relocation {}",
reloc.r_type
)));
}
}; };
let offset = reloc.r_offset as u32; let offset = reloc.r_offset as u32;
let addend = reloc.r_addend.unwrap_or(0); let addend = reloc.r_addend.unwrap_or(0);
let target = reloc.r_sym; let target = reloc.r_sym;
// TODO: error handling // TODO: error handling
let target = elf.syms.get(target).unwrap(); let elf_target = elf.syms.get(target).unwrap();
if target.st_type() == goblin::elf::sym::STT_SECTION { let elf_target_section = ElfSectionIndex::from_usize(elf_target.st_shndx)?;
let len = required_custom_sections.len(); let reloc_target = if elf_target.st_type() == goblin::elf::sym::STT_SECTION {
let entry = required_custom_sections.entry(target.st_shndx); if visited.insert(elf_target_section) {
let local_section_index = *entry.or_insert(len) as _; worklist.push(elf_target_section);
local_relocations.push(LocalRelocation { }
kind, elf_section_to_target(elf_target_section)
local_section_index, } else if elf_target.st_type() == goblin::elf::sym::STT_FUNC
offset, && elf_target_section == wasmer_function_index
addend,
});
} else if target.st_type() == goblin::elf::sym::STT_FUNC
&& target.st_shndx == wasmer_function_idx
{ {
// This is a function referencing its own byte stream. // This is a function referencing its own byte stream.
relocations.push(Relocation { RelocationTarget::LocalFunc(*local_func_index)
kind, } else if elf_target.st_type() == goblin::elf::sym::STT_NOTYPE
reloc_target: RelocationTarget::LocalFunc(*local_func_index), && elf_target_section.is_undef()
offset,
addend,
});
} else if target.st_type() == goblin::elf::sym::STT_NOTYPE
&& target.st_shndx == goblin::elf::section_header::SHN_UNDEF as _
{ {
// Not defined in this .o file. Maybe another local function? // Not defined in this .o file. Maybe another local function?
let name = target.st_name; let name = elf_target.st_name;
let name = elf.strtab.get(name).unwrap().unwrap(); let name = elf.strtab.get(name).unwrap().unwrap();
if let Some((index, _)) = if let Some((index, _)) =
func_names.iter().find(|(_, func_name)| *func_name == name) func_names.iter().find(|(_, func_name)| *func_name == name)
@@ -410,70 +473,78 @@ impl FuncTranslator {
let local_index = wasm_module let local_index = wasm_module
.local_func_index(index) .local_func_index(index)
.expect("Relocation to non-local function"); .expect("Relocation to non-local function");
relocations.push(Relocation { RelocationTarget::LocalFunc(local_index)
kind,
reloc_target: RelocationTarget::LocalFunc(local_index),
offset,
addend,
});
// Maybe a libcall then? // Maybe a libcall then?
} else if let Some(libcall) = libcalls.get(name) { } else if let Some(libcall) = libcalls.get(name) {
relocations.push(Relocation { RelocationTarget::LibCall(*libcall)
kind,
reloc_target: RelocationTarget::LibCall(*libcall),
offset,
addend,
});
} else { } else {
unimplemented!("reference to unknown symbol {}", name); unimplemented!("reference to unknown symbol {}", name);
} }
} else { } else {
unimplemented!("unknown relocation {:?} with target {:?}", reloc, target); unimplemented!("unknown relocation {:?} with target {:?}", reloc, target);
} };
relocations
.entry(section_index)
.or_default()
.push(Relocation {
kind,
reloc_target,
offset,
addend,
});
} }
} }
let mut custom_sections = vec![]; let mut custom_sections = section_to_custom_section
custom_sections.resize( .iter()
required_custom_sections.len(), .map(|(elf_section_index, custom_section_index)| {
(
custom_section_index,
CustomSection { CustomSection {
protection: CustomSectionProtection::Read, protection: CustomSectionProtection::Read,
bytes: SectionBody::default(), bytes: SectionBody::new_with_vec(section_bytes(*elf_section_index)),
relocations: vec![], relocations: relocations
.remove_entry(elf_section_index)
.map_or(vec![], |(_, v)| v),
}, },
); )
for (section_idx, local_section_idx) in required_custom_sections { })
let bytes = elf.section_headers[section_idx as usize].file_range(); .collect::<Vec<_>>();
let bytes = &mem_buf_slice[bytes.start..bytes.end]; custom_sections.sort_unstable_by_key(|a| a.0);
custom_sections[local_section_idx].bytes.extend(bytes); let custom_sections = custom_sections
} .into_iter()
.map(|(_, v)| v)
.collect::<PrimaryMap<SectionIndex, _>>();
let function_body = FunctionBody {
body: section_bytes(wasmer_function_index),
unwind_info: None,
};
let address_map = FunctionAddressMap { let address_map = FunctionAddressMap {
instructions: vec![InstructionAddressMap { instructions: vec![InstructionAddressMap {
srcloc: SourceLoc::default(), srcloc: SourceLoc::default(),
code_offset: 0, code_offset: 0,
code_len: bytes.len(), code_len: function_body.body.len(),
}], }],
start_srcloc: SourceLoc::default(), start_srcloc: SourceLoc::default(),
end_srcloc: SourceLoc::default(), end_srcloc: SourceLoc::default(),
body_offset: 0, body_offset: 0,
body_len: bytes.len(), body_len: function_body.body.len(),
}; };
Ok(( Ok((
CompiledFunction { CompiledFunction {
body: FunctionBody { body: function_body,
body: bytes,
unwind_info: None,
},
jt_offsets: SecondaryMap::new(), jt_offsets: SecondaryMap::new(),
relocations, relocations: relocations
.remove_entry(&wasmer_function_index)
.map_or(vec![], |(_, v)| v),
frame_info: CompiledFunctionFrameInfo { frame_info: CompiledFunctionFrameInfo {
address_map, address_map,
traps: vec![], traps: vec![],
}, },
}, },
local_relocations,
custom_sections, custom_sections,
)) ))
} }
@@ -2156,96 +2227,34 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
Operator::GlobalGet { global_index } => { Operator::GlobalGet { global_index } => {
let global_index = GlobalIndex::from_u32(global_index); let global_index = GlobalIndex::from_u32(global_index);
let global_type = module.globals[global_index]; match ctx.global(global_index, intrinsics) {
let global_value_type = global_type.ty; GlobalCache::Const { value } => {
// TODO: cache loads of const globals.
let _global_mutability = global_type.mutability;
let global_ptr =
if let Some(local_global_index) = module.local_global_index(global_index) {
let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
unsafe { builder.build_gep(*vmctx, &[offset], "") }
} else {
let offset = self.vmoffsets.vmctx_vmglobal_import(global_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
let global_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") };
let global_ptr_ptr = builder
.build_bitcast(global_ptr_ptr, intrinsics.i8_ptr_ty, "")
.into_pointer_value();
let global_ptr = builder.build_load(global_ptr_ptr, "");
builder
.build_bitcast(global_ptr, intrinsics.i8_ptr_ty, "")
.into_pointer_value()
};
let global_ptr = builder
.build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value();
let value = builder.build_load(global_ptr, "");
// TODO: add TBAA info.
self.state.push1(value); self.state.push1(value);
} }
GlobalCache::Mut { ptr_to_value } => {
let value = builder.build_load(ptr_to_value, "");
// TODO: tbaa
self.state.push1(value);
}
}
}
Operator::GlobalSet { global_index } => { Operator::GlobalSet { global_index } => {
let global_index = GlobalIndex::from_u32(global_index); let global_index = GlobalIndex::from_u32(global_index);
let global_type = module.globals[global_index]; match ctx.global(global_index, intrinsics) {
let global_value_type = global_type.ty; GlobalCache::Const { value } => {
return Err(CompileError::Codegen(format!(
// Note that we don't check mutability, assuming that's already "global.set on immutable global index {}",
// been checked by some other verifier. global_index.as_u32()
)))
let global_ptr = }
if let Some(local_global_index) = module.local_global_index(global_index) {
let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
unsafe { builder.build_gep(*vmctx, &[offset], "") }
} else {
let offset = self.vmoffsets.vmctx_vmglobal_import(global_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
let global_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") };
let global_ptr_ptr = builder
.build_bitcast(global_ptr_ptr, intrinsics.i8_ptr_ty, "")
.into_pointer_value();
builder.build_load(global_ptr_ptr, "").into_pointer_value()
};
let global_ptr = builder
.build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value();
let (value, info) = self.state.pop1_extra()?;
let value = apply_pending_canonicalization(builder, intrinsics, value, info);
builder.build_store(global_ptr, value);
// TODO: add TBAA info
/*
let (value, info) = self.state.pop1_extra()?;
let value = apply_pending_canonicalization(builder, intrinsics, value, info);
let index = GlobalIndex::from_u32(global_index);
let global_cache = ctx.global_cache(index, intrinsics, self.module);
match global_cache {
GlobalCache::Mut { ptr_to_value } => { GlobalCache::Mut { ptr_to_value } => {
let store = builder.build_store(ptr_to_value, value); let (value, info) = self.state.pop1_extra()?;
tbaa_label( let value =
&self.module, apply_pending_canonicalization(builder, intrinsics, value, info);
intrinsics, builder.build_store(ptr_to_value, value);
"global", // TODO: tbaa
store,
Some(global_index),
);
}
GlobalCache::Const { value: _ } => {
return Err(CompileError::Codegen("global is immutable".to_string()));
} }
} }
*/
} }
Operator::Select => { Operator::Select => {

View File

@@ -910,7 +910,7 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
}) })
} }
pub fn table_prepare( fn table_prepare(
&mut self, &mut self,
table_index: TableIndex, table_index: TableIndex,
intrinsics: &Intrinsics<'ctx>, intrinsics: &Intrinsics<'ctx>,
@@ -952,6 +952,9 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
); );
let ptr_to_bounds = let ptr_to_bounds =
unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") }; unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
let ptr_to_bounds = cache_builder
.build_bitcast(ptr_to_bounds, intrinsics.i32_ptr_ty, "")
.into_pointer_value();
(ptr_to_base_ptr, ptr_to_bounds) (ptr_to_base_ptr, ptr_to_bounds)
} else { } else {
let offset = intrinsics.i64_ty.const_int( let offset = intrinsics.i64_ty.const_int(
@@ -989,6 +992,9 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
.const_int(offsets.vmtable_definition_current_elements().into(), false); .const_int(offsets.vmtable_definition_current_elements().into(), false);
let ptr_to_bounds = let ptr_to_bounds =
unsafe { cache_builder.build_gep(definition_ptr, &[offset], "") }; unsafe { cache_builder.build_gep(definition_ptr, &[offset], "") };
let ptr_to_bounds = cache_builder
.build_bitcast(ptr_to_bounds, intrinsics.i32_ptr_ty, "")
.into_pointer_value();
(ptr_to_base_ptr, ptr_to_bounds) (ptr_to_base_ptr, ptr_to_bounds)
}; };
TableCache { TableCache {
@@ -1008,10 +1014,14 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
builder: &Builder<'ctx>, builder: &Builder<'ctx>,
) -> (PointerValue<'ctx>, IntValue<'ctx>) { ) -> (PointerValue<'ctx>, IntValue<'ctx>) {
let (ptr_to_base_ptr, ptr_to_bounds) = self.table_prepare(index, intrinsics, module); let (ptr_to_base_ptr, ptr_to_bounds) = self.table_prepare(index, intrinsics, module);
let base_ptr = builder let base_ptr = self
.cache_builder
.build_load(ptr_to_base_ptr, "base_ptr") .build_load(ptr_to_base_ptr, "base_ptr")
.into_pointer_value(); .into_pointer_value();
let bounds = builder.build_load(ptr_to_bounds, "bounds").into_int_value(); let bounds = self
.cache_builder
.build_load(ptr_to_bounds, "bounds")
.into_int_value();
tbaa_label( tbaa_label(
module, module,
intrinsics, intrinsics,
@@ -1078,106 +1088,59 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
}) })
} }
pub fn global_cache( pub fn global(
&mut self, &mut self,
index: GlobalIndex, index: GlobalIndex,
intrinsics: &Intrinsics<'ctx>, intrinsics: &Intrinsics<'ctx>,
module: &Module<'ctx>,
) -> GlobalCache<'ctx> { ) -> GlobalCache<'ctx> {
let (cached_globals, ctx_ptr_value, wasm_module, cache_builder, offsets) = ( let (cached_globals, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
&mut self.cached_globals, &mut self.cached_globals,
self.ctx_ptr_value,
self.wasm_module, self.wasm_module,
self.ctx_ptr_value,
&self.cache_builder, &self.cache_builder,
&self.offsets, &self.offsets,
); );
*cached_globals.entry(index).or_insert_with(|| { *cached_globals.entry(index).or_insert_with(|| {
let (globals_array_ptr_ptr, index, mutable, wasmer_ty, field_name) = { let global_type = wasm_module.globals[index];
let desc = wasm_module.globals.get(index).unwrap(); let global_value_type = global_type.ty;
if let Some(_local_global_index) = wasm_module.local_global_index(index) {
( let global_mutability = global_type.mutability;
unsafe { let global_ptr = if let Some(local_global_index) = wasm_module.local_global_index(index)
cache_builder {
.build_struct_gep( let offset = offsets.vmctx_vmglobal_definition(local_global_index);
ctx_ptr_value, let offset = intrinsics.i32_ty.const_int(offset.into(), false);
offset_to_index(offsets.vmctx_globals_begin()), unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") }
"globals_array_ptr_ptr",
)
.unwrap()
},
index.index() as u64,
desc.mutability,
desc.ty,
"context_field_ptr_to_local_globals",
)
} else { } else {
( let offset = offsets.vmctx_vmglobal_import(index);
unsafe { let offset = intrinsics.i32_ty.const_int(offset.into(), false);
cache_builder let global_ptr_ptr =
.build_struct_gep( unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
ctx_ptr_value, let global_ptr_ptr = cache_builder
offset_to_index(offsets.vmctx_imported_globals_begin()), .build_bitcast(
"globals_array_ptr_ptr", global_ptr_ptr,
intrinsics.i32_ptr_ty.ptr_type(AddressSpace::Generic),
"",
) )
.unwrap()
},
index.index() as u64,
desc.mutability,
desc.ty,
"context_field_ptr_to_imported_globals",
)
}
};
let llvm_ptr_ty = type_to_llvm_ptr(intrinsics, wasmer_ty);
let global_array_ptr = cache_builder
.build_load(globals_array_ptr_ptr, "global_array_ptr")
.into_pointer_value(); .into_pointer_value();
tbaa_label( cache_builder
module, .build_load(global_ptr_ptr, "")
intrinsics, .into_pointer_value()
field_name,
global_array_ptr.as_instruction_value().unwrap(),
None,
);
let const_index = intrinsics.i32_ty.const_int(index, false);
let global_ptr_ptr = unsafe {
cache_builder.build_in_bounds_gep(
global_array_ptr,
&[const_index],
"global_ptr_ptr",
)
}; };
let global_ptr = cache_builder let global_ptr = cache_builder
.build_load(global_ptr_ptr, "global_ptr") .build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value(); .into_pointer_value();
tbaa_label(
module,
intrinsics,
"global_ptr",
global_ptr.as_instruction_value().unwrap(),
Some(index as u32),
);
let global_ptr_typed = match global_mutability {
cache_builder.build_pointer_cast(global_ptr, llvm_ptr_ty, "global_ptr_typed"); Mutability::Const => GlobalCache::Const {
value: cache_builder.build_load(global_ptr, ""),
let mutable = mutable == Mutability::Var; },
if mutable { Mutability::Var => GlobalCache::Mut {
GlobalCache::Mut { ptr_to_value: global_ptr,
ptr_to_value: global_ptr_typed, },
}
} else {
let value = cache_builder.build_load(global_ptr_typed, "global_value");
tbaa_label(
module,
intrinsics,
"global",
value.as_instruction_value().unwrap(),
Some(index as u32),
);
GlobalCache::Const { value }
} }
}) })
} }

View File

@@ -8428,8 +8428,7 @@ pub fn gen_import_call_trampoline(
); );
a.emit_host_redirection(GPR::RAX); a.emit_host_redirection(GPR::RAX);
let mut section_body = SectionBody::default(); let section_body = SectionBody::new_with_vec(a.finalize().unwrap().to_vec());
section_body.extend(&a.finalize().unwrap());
CustomSection { CustomSection {
protection: CustomSectionProtection::ReadExecute, protection: CustomSectionProtection::ReadExecute,

View File

@@ -118,7 +118,7 @@ impl Compiler for SinglepassCompiler {
Ok(Compilation::new(functions, import_trampolines)) Ok(Compilation::new(functions, import_trampolines))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {
@@ -128,6 +128,14 @@ impl Compiler for SinglepassCompiler {
.map(gen_std_trampoline) .map(gen_std_trampoline)
.collect()) .collect())
} }
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError> {
Ok(PrimaryMap::new())
// unimplemented!("Dynamic funciton trampolines not yet implemented");
}
} }
trait ToCompileError { trait ToCompileError {

View File

@@ -9,7 +9,9 @@ use crate::target::Target;
use crate::FunctionBodyData; use crate::FunctionBodyData;
use crate::ModuleTranslationState; use crate::ModuleTranslationState;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{Features, FunctionType, LocalFunctionIndex, MemoryIndex, TableIndex}; use wasm_common::{
Features, FunctionIndex, FunctionType, LocalFunctionIndex, MemoryIndex, TableIndex,
};
use wasmer_runtime::Module; use wasmer_runtime::Module;
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
use wasmparser::{validate, OperatorValidatorConfig, ValidatingParserConfig}; use wasmparser::{validate, OperatorValidatorConfig, ValidatingParserConfig};
@@ -80,8 +82,30 @@ pub trait Compiler {
/// let func = instance.exports.func("my_func"); /// let func = instance.exports.func("my_func");
/// func.call(&[Value::I32(1)]); /// func.call(&[Value::I32(1)]);
/// ``` /// ```
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError>; ) -> Result<Vec<FunctionBody>, CompileError>;
/// Compile the trampolines to call a dynamic function defined in
/// a host, from a Wasm module.
///
/// This allows us to create dynamic Wasm functions, such as:
///
/// ```ignore
/// fn my_func(values: Vec<Val>) -> Vec<Val> {
/// // do something
/// }
///
/// let my_func_type = FuncType::new(vec![Type::I32], vec![Type::I32]);
/// let imports = imports!{
/// "namespace" => {
/// "my_func" => Func::new_dynamic(my_func_type, my_func),s
/// }
/// }
/// ```
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError>;
} }

View File

@@ -78,6 +78,7 @@ pub use crate::unwind::{CompiledFunctionUnwindInfo, FDERelocEntry, FunctionTable
pub use wasm_common::Features; pub use wasm_common::Features;
#[cfg(feature = "translator")]
/// wasmparser is exported as a module to slim compiler dependencies /// wasmparser is exported as a module to slim compiler dependencies
pub mod wasmparser { pub mod wasmparser {
pub use wasmparser::*; pub use wasmparser::*;

View File

@@ -25,7 +25,6 @@ pub enum CustomSectionProtection {
// We don't include `ReadWrite` here because it would complicate freeze // We don't include `ReadWrite` here because it would complicate freeze
// and resumption of executing Modules. // and resumption of executing Modules.
/// A custom section with read and execute permissions. /// A custom section with read and execute permissions.
ReadExecute, ReadExecute,
} }
@@ -56,14 +55,9 @@ pub struct CustomSection {
pub struct SectionBody(#[serde(with = "serde_bytes")] Vec<u8>); pub struct SectionBody(#[serde(with = "serde_bytes")] Vec<u8>);
impl SectionBody { impl SectionBody {
/// Extend the section with the bytes given. /// Create a new section body with the given contents.
pub fn extend(&mut self, contents: &[u8]) { pub fn new_with_vec(contents: Vec<u8>) -> Self {
self.0.extend(contents); Self(contents)
}
/// Extends the section by appending bytes from another section.
pub fn append(&mut self, body: &Self) {
self.0.extend(&body.0);
} }
/// Returns a raw pointer to the section's buffer. /// Returns a raw pointer to the section's buffer.
@@ -83,6 +77,6 @@ impl SectionBody {
/// Returns whether or not the section body is empty. /// Returns whether or not the section body is empty.
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.len() == 0 self.0.is_empty()
} }
} }

View File

@@ -5,20 +5,19 @@ pub use target_lexicon::{Architecture, CallingConvention, OperatingSystem, Tripl
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use raw_cpuid::CpuId; use raw_cpuid::CpuId;
/// The nomenclature is inspired by the [raw-cpuid crate]. /// The nomenclature is inspired by the [`cpuid` crate].
/// The list of supported features was initially retrieved from /// The list of supported features was initially retrieved from
/// [cranelift-native]. /// [`cranelift-native`].
/// ///
/// The `CpuFeature` enum vaues are likely to grow closer to the /// The `CpuFeature` enum values are likely to grow closer to the
/// original cpuid. However, we prefer to start small and grow from there. /// original `cpuid`. However, we prefer to start small and grow from there.
/// ///
/// If you would like to use a flag that doesn't exist yet here, please /// If you would like to use a flag that doesn't exist yet here, please
/// open a PR. /// open a PR.
/// ///
/// [cpuid crate]: https://docs.rs/cpuid/0.1.1/cpuid/enum.CpuFeature.html /// [`cpuid` crate]: https://docs.rs/cpuid/0.1.1/cpuid/enum.CpuFeature.html
/// [cranelift-native]: https://github.com/bytecodealliance/cranelift/blob/6988545fd20249b084c53f4761b8c861266f5d31/cranelift-native/src/lib.rs#L51-L92 /// [`cranelift-native`]: https://github.com/bytecodealliance/cranelift/blob/6988545fd20249b084c53f4761b8c861266f5d31/cranelift-native/src/lib.rs#L51-L92
#[allow(missing_docs)] #[allow(missing_docs, clippy::derive_hash_xor_eq)]
#[allow(clippy::derive_hash_xor_eq)]
#[derive(EnumSetType, Debug, Hash)] #[derive(EnumSetType, Debug, Hash)]
pub enum CpuFeature { pub enum CpuFeature {
// X86 features // X86 features

View File

@@ -4,10 +4,9 @@ use crate::{CodeMemory, CompiledModule};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex, TableIndex}; use wasm_common::{FunctionIndex, FunctionType, LocalFunctionIndex, SignatureIndex};
use wasmer_compiler::{ use wasmer_compiler::{
Compilation, CompileError, CustomSection, CustomSectionProtection, FunctionBody, SectionIndex, CompileError, CustomSection, CustomSectionProtection, FunctionBody, SectionIndex, Target,
Target,
}; };
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
use wasmer_compiler::{Compiler, CompilerConfig}; use wasmer_compiler::{Compiler, CompilerConfig};
@@ -42,7 +41,7 @@ impl JITEngine {
Self { Self {
inner: Arc::new(Mutex::new(JITEngineInner { inner: Arc::new(Mutex::new(JITEngineInner {
compiler: Some(compiler), compiler: Some(compiler),
trampolines: HashMap::new(), function_call_trampolines: HashMap::new(),
code_memory: CodeMemory::new(), code_memory: CodeMemory::new(),
signatures: SignatureRegistry::new(), signatures: SignatureRegistry::new(),
})), })),
@@ -68,7 +67,7 @@ impl JITEngine {
inner: Arc::new(Mutex::new(JITEngineInner { inner: Arc::new(Mutex::new(JITEngineInner {
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
compiler: None, compiler: None,
trampolines: HashMap::new(), function_call_trampolines: HashMap::new(),
code_memory: CodeMemory::new(), code_memory: CodeMemory::new(),
signatures: SignatureRegistry::new(), signatures: SignatureRegistry::new(),
})), })),
@@ -110,8 +109,8 @@ impl Engine for JITEngine {
} }
/// Retrieves a trampoline given a signature /// Retrieves a trampoline given a signature
fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> { fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.compiler().trampoline(sig) self.compiler().function_call_trampoline(sig)
} }
/// Validates a WebAssembly module /// Validates a WebAssembly module
@@ -176,7 +175,7 @@ pub struct JITEngineInner {
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
compiler: Option<Box<dyn Compiler + Send>>, compiler: Option<Box<dyn Compiler + Send>>,
/// Pointers to trampoline functions used to enter particular signatures /// Pointers to trampoline functions used to enter particular signatures
trampolines: HashMap<VMSharedSignatureIndex, VMTrampoline>, function_call_trampolines: HashMap<VMSharedSignatureIndex, VMTrampoline>,
/// The code memory is responsible of publishing the compiled /// The code memory is responsible of publishing the compiled
/// functions to memory. /// functions to memory.
code_memory: CodeMemory, code_memory: CodeMemory,
@@ -237,8 +236,15 @@ impl JITEngineInner {
&mut self, &mut self,
module: &Module, module: &Module,
functions: &PrimaryMap<LocalFunctionIndex, FunctionBody>, functions: &PrimaryMap<LocalFunctionIndex, FunctionBody>,
trampolines: &PrimaryMap<SignatureIndex, FunctionBody>, function_call_trampolines: &PrimaryMap<SignatureIndex, FunctionBody>,
) -> Result<PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>, CompileError> { dynamic_function_trampolines: &PrimaryMap<FunctionIndex, FunctionBody>,
) -> Result<
(
PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>,
PrimaryMap<FunctionIndex, *const VMFunctionBody>,
),
CompileError,
> {
// Allocate all of the compiled functions into executable memory, // Allocate all of the compiled functions into executable memory,
// copying over their contents. // copying over their contents.
let allocated_functions = let allocated_functions =
@@ -251,10 +257,10 @@ impl JITEngineInner {
)) ))
})?; })?;
for (sig_index, compiled_function) in trampolines.iter() { for (sig_index, compiled_function) in function_call_trampolines.iter() {
let func_type = module.signatures.get(sig_index).unwrap(); let func_type = module.signatures.get(sig_index).unwrap();
let index = self.signatures.register(&func_type); let index = self.signatures.register(&func_type);
if self.trampolines.contains_key(&index) { if self.function_call_trampolines.contains_key(&index) {
// We don't need to allocate the trampoline in case // We don't need to allocate the trampoline in case
// it's signature is already allocated. // it's signature is already allocated.
continue; continue;
@@ -264,16 +270,34 @@ impl JITEngineInner {
.allocate_for_function(&compiled_function) .allocate_for_function(&compiled_function)
.map_err(|message| { .map_err(|message| {
CompileError::Resource(format!( CompileError::Resource(format!(
"failed to allocate memory for trampolines: {}", "failed to allocate memory for function call trampolines: {}",
message message
)) ))
})? })?
.as_ptr(); .as_ptr();
let trampoline = let trampoline =
unsafe { std::mem::transmute::<*const VMFunctionBody, VMTrampoline>(ptr) }; unsafe { std::mem::transmute::<*const VMFunctionBody, VMTrampoline>(ptr) };
self.trampolines.insert(index, trampoline); self.function_call_trampolines.insert(index, trampoline);
} }
Ok(allocated_functions)
let allocated_dynamic_function_trampolines = dynamic_function_trampolines
.values()
.map(|compiled_function| {
let ptr = self
.code_memory
.allocate_for_function(&compiled_function)
.map_err(|message| {
CompileError::Resource(format!(
"failed to allocate memory for dynamic function trampolines: {}",
message
))
})?
.as_ptr();
Ok(ptr)
})
.collect::<Result<PrimaryMap<FunctionIndex, _>, CompileError>>()?;
Ok((allocated_functions, allocated_dynamic_function_trampolines))
} }
/// Make memory containing compiled code executable. /// Make memory containing compiled code executable.
@@ -287,7 +311,7 @@ impl JITEngineInner {
} }
/// Gets the trampoline pre-registered for a particular signature /// Gets the trampoline pre-registered for a particular signature
pub fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> { pub fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.trampolines.get(&sig).cloned() self.function_call_trampolines.get(&sig).cloned()
} }
} }

View File

@@ -8,8 +8,8 @@ use std::any::Any;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use wasm_common::entity::{BoxedSlice, PrimaryMap}; use wasm_common::entity::{BoxedSlice, PrimaryMap};
use wasm_common::{ use wasm_common::{
DataInitializer, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex, DataInitializer, FunctionIndex, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer,
TableIndex, SignatureIndex, TableIndex,
}; };
use wasmer_compiler::CompileError; use wasmer_compiler::CompileError;
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
@@ -30,6 +30,7 @@ pub struct CompiledModule {
serializable: SerializableModule, serializable: SerializableModule,
finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>, finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>,
finished_dynamic_function_trampolines: BoxedSlice<FunctionIndex, *const VMFunctionBody>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>, signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
frame_info_registration: Mutex<Option<Option<GlobalFrameInfoRegistration>>>, frame_info_registration: Mutex<Option<Option<GlobalFrameInfoRegistration>>>,
} }
@@ -75,11 +76,14 @@ impl CompiledModule {
.values() .values()
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let trampolines = compiler let function_call_trampolines = compiler
.compile_wasm_trampolines(&func_types)? .compile_function_call_trampolines(&func_types)?
.into_iter() .into_iter()
.collect::<PrimaryMap<SignatureIndex, _>>(); .collect::<PrimaryMap<SignatureIndex, _>>();
let dynamic_function_trampolines =
compiler.compile_dynamic_function_trampolines(&translation.module)?;
let data_initializers = translation let data_initializers = translation
.data_initializers .data_initializers
.iter() .iter()
@@ -98,7 +102,8 @@ impl CompiledModule {
function_relocations: compilation.get_relocations(), function_relocations: compilation.get_relocations(),
function_jt_offsets: compilation.get_jt_offsets(), function_jt_offsets: compilation.get_jt_offsets(),
function_frame_info: frame_infos, function_frame_info: frame_infos,
trampolines, function_call_trampolines,
dynamic_function_trampolines,
custom_sections: compilation.get_custom_sections(), custom_sections: compilation.get_custom_sections(),
custom_section_relocations: compilation.get_custom_section_relocations(), custom_section_relocations: compilation.get_custom_section_relocations(),
}; };
@@ -146,10 +151,11 @@ impl CompiledModule {
jit_compiler: &mut JITEngineInner, jit_compiler: &mut JITEngineInner,
serializable: SerializableModule, serializable: SerializableModule,
) -> Result<Self, CompileError> { ) -> Result<Self, CompileError> {
let finished_functions = jit_compiler.allocate( let (finished_functions, finished_dynamic_function_trampolines) = jit_compiler.allocate(
&serializable.module, &serializable.module,
&serializable.compilation.function_bodies, &serializable.compilation.function_bodies,
&serializable.compilation.trampolines, &serializable.compilation.function_call_trampolines,
&serializable.compilation.dynamic_function_trampolines,
)?; )?;
let custom_sections = let custom_sections =
jit_compiler.allocate_custom_sections(&serializable.compilation.custom_sections)?; jit_compiler.allocate_custom_sections(&serializable.compilation.custom_sections)?;
@@ -180,6 +186,8 @@ impl CompiledModule {
Ok(Self { Ok(Self {
serializable, serializable,
finished_functions: finished_functions.into_boxed_slice(), finished_functions: finished_functions.into_boxed_slice(),
finished_dynamic_function_trampolines: finished_dynamic_function_trampolines
.into_boxed_slice(),
signatures: signatures.into_boxed_slice(), signatures: signatures.into_boxed_slice(),
frame_info_registration: Mutex::new(None), frame_info_registration: Mutex::new(None),
}) })
@@ -211,6 +219,7 @@ impl CompiledModule {
&self.module(), &self.module(),
&sig_registry, &sig_registry,
resolver, resolver,
&self.finished_dynamic_function_trampolines,
self.memory_plans(), self.memory_plans(),
self.table_plans(), self.table_plans(),
) )

View File

@@ -2,7 +2,8 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{ use wasm_common::{
Features, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex, TableIndex, Features, FunctionIndex, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex,
TableIndex,
}; };
use wasmer_compiler::{ use wasmer_compiler::{
CustomSection, FunctionBody, JumpTableOffsets, Relocation, SectionBody, SectionIndex, CustomSection, FunctionBody, JumpTableOffsets, Relocation, SectionBody, SectionIndex,
@@ -21,7 +22,8 @@ pub struct SerializableCompilation {
// to allow lazy frame_info deserialization, we convert it to it's lazy binary // to allow lazy frame_info deserialization, we convert it to it's lazy binary
// format upon serialization. // format upon serialization.
pub function_frame_info: PrimaryMap<LocalFunctionIndex, SerializableFunctionFrameInfo>, pub function_frame_info: PrimaryMap<LocalFunctionIndex, SerializableFunctionFrameInfo>,
pub trampolines: PrimaryMap<SignatureIndex, FunctionBody>, pub function_call_trampolines: PrimaryMap<SignatureIndex, FunctionBody>,
pub dynamic_function_trampolines: PrimaryMap<FunctionIndex, FunctionBody>,
pub custom_sections: PrimaryMap<SectionIndex, CustomSection>, pub custom_sections: PrimaryMap<SectionIndex, CustomSection>,
pub custom_section_relocations: PrimaryMap<SectionIndex, Vec<Relocation>>, pub custom_section_relocations: PrimaryMap<SectionIndex, Vec<Relocation>>,
} }

View File

@@ -24,7 +24,7 @@ pub trait Engine {
fn lookup_signature(&self, sig: VMSharedSignatureIndex) -> Option<FunctionType>; fn lookup_signature(&self, sig: VMSharedSignatureIndex) -> Option<FunctionType>;
/// Retrieves a trampoline given a signature /// Retrieves a trampoline given a signature
fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline>; fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline>;
/// Validates a WebAssembly module /// Validates a WebAssembly module
fn validate(&self, binary: &[u8]) -> Result<(), CompileError>; fn validate(&self, binary: &[u8]) -> Result<(), CompileError>;

View File

@@ -3,11 +3,11 @@
use crate::error::{ImportError, LinkError}; use crate::error::{ImportError, LinkError};
use more_asserts::assert_ge; use more_asserts::assert_ge;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::{BoxedSlice, EntityRef, PrimaryMap};
use wasm_common::{ExternType, ImportIndex, MemoryIndex, TableIndex}; use wasm_common::{ExternType, FunctionIndex, ImportIndex, MemoryIndex, TableIndex};
use wasmer_runtime::{ use wasmer_runtime::{
Export, Imports, SignatureRegistry, VMFunctionImport, VMGlobalImport, VMMemoryImport, Export, Imports, SignatureRegistry, VMFunctionBody, VMFunctionImport, VMFunctionKind,
VMTableImport, VMGlobalImport, VMMemoryImport, VMTableImport,
}; };
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
@@ -91,6 +91,7 @@ pub fn resolve_imports(
module: &Module, module: &Module,
signatures: &SignatureRegistry, signatures: &SignatureRegistry,
resolver: &dyn Resolver, resolver: &dyn Resolver,
finished_dynamic_function_trampolines: &BoxedSlice<FunctionIndex, *const VMFunctionBody>,
memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>, memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>,
_table_plans: &PrimaryMap<TableIndex, TablePlan>, _table_plans: &PrimaryMap<TableIndex, TablePlan>,
) -> Result<Imports, LinkError> { ) -> Result<Imports, LinkError> {
@@ -122,8 +123,21 @@ pub fn resolve_imports(
} }
match resolved { match resolved {
Export::Function(ref f) => { Export::Function(ref f) => {
let address = match f.kind {
VMFunctionKind::Dynamic => {
// If this is a dynamic imported function,
// the address of the funciton is the address of the
// reverse trampoline.
let index = FunctionIndex::new(function_imports.len());
finished_dynamic_function_trampolines[index]
// TODO: We should check that the f.vmctx actually matches
// the shape of `VMDynamicFunctionImportContext`
}
VMFunctionKind::Static => f.address,
};
function_imports.push(VMFunctionImport { function_imports.push(VMFunctionImport {
body: f.address, body: address,
vmctx: f.vmctx, vmctx: f.vmctx,
}); });
} }

View File

@@ -2,8 +2,8 @@ use crate::memory::LinearMemory;
use crate::module::{MemoryPlan, TablePlan}; use crate::module::{MemoryPlan, TablePlan};
use crate::table::Table; use crate::table::Table;
use crate::vmcontext::{ use crate::vmcontext::{
VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMSharedSignatureIndex, VMContext, VMFunctionBody, VMFunctionKind, VMGlobalDefinition, VMMemoryDefinition,
VMTableDefinition, VMSharedSignatureIndex, VMTableDefinition,
}; };
use wasm_common::GlobalType; use wasm_common::GlobalType;
@@ -34,6 +34,8 @@ pub struct ExportFunction {
/// ///
/// Note that this indexes within the module associated with `vmctx`. /// Note that this indexes within the module associated with `vmctx`.
pub signature: VMSharedSignatureIndex, pub signature: VMSharedSignatureIndex,
/// The function kind (it defines how it's the signature that provided `address` have)
pub kind: VMFunctionKind,
} }
impl From<ExportFunction> for Export { impl From<ExportFunction> for Export {

View File

@@ -8,8 +8,8 @@ use crate::table::Table;
use crate::trap::{catch_traps, init_traps, Trap, TrapCode}; use crate::trap::{catch_traps, init_traps, Trap, TrapCode};
use crate::vmcontext::{ use crate::vmcontext::{
VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport,
VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport,
VMTableDefinition, VMTableImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
}; };
use crate::{ExportFunction, ExportGlobal, ExportMemory, ExportTable}; use crate::{ExportFunction, ExportGlobal, ExportMemory, ExportTable};
use crate::{Module, TableElements, VMOffsets}; use crate::{Module, TableElements, VMOffsets};
@@ -294,6 +294,11 @@ impl Instance {
}; };
ExportFunction { ExportFunction {
address, address,
// Any function received is already static at this point as:
// 1. All locally defined functions in the Wasm have a static signature.
// 2. All the imported functions are already static (because
// they point to the trampolines rather than the dynamic addresses).
kind: VMFunctionKind::Static,
signature, signature,
vmctx, vmctx,
} }

View File

@@ -50,9 +50,10 @@ pub use crate::sig_registry::SignatureRegistry;
pub use crate::table::Table; pub use crate::table::Table;
pub use crate::trap::*; pub use crate::trap::*;
pub use crate::vmcontext::{ pub use crate::vmcontext::{
VMBuiltinFunctionIndex, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMBuiltinFunctionIndex, VMCallerCheckedAnyfunc, VMContext, VMDynamicFunctionImportContext,
VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMFunctionBody, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
VMTableDefinition, VMTableImport, VMTrampoline, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
VMTrampoline,
}; };
pub use crate::vmoffsets::{TargetSharedSignatureIndex, VMOffsets}; pub use crate::vmoffsets::{TargetSharedSignatureIndex, VMOffsets};

View File

@@ -46,6 +46,52 @@ mod test_vmfunction_import {
} }
} }
/// The `VMDynamicFunctionImportContext` is the context that dynamic
/// functions will receive when called (rather than `vmctx`).
/// A dynamic function is a function for which we don't know the signature
/// until runtime.
///
/// As such, we need to expose the dynamic function `context`
/// containing the relevant context for running the function indicated
/// in `address`.
#[repr(C)]
pub struct VMDynamicFunctionImportContext<T: Sized> {
/// The address of the inner dynamic function.
///
/// Note: The function must be on the form of
/// `(*mut T, *mut VMContext, SignatureIndex, *mut i128)`.
pub address: *const VMFunctionBody,
/// The context that the inner dynamic function will receive.
pub ctx: T,
}
#[cfg(test)]
mod test_vmdynamicfunction_import_context {
use super::VMDynamicFunctionImportContext;
use crate::{Module, VMOffsets};
use memoffset::offset_of;
use std::mem::size_of;
#[test]
fn check_vmdynamicfunction_import_context_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMDynamicFunctionImportContext<usize>>(),
usize::from(offsets.size_of_vmdynamicfunction_import_context())
);
assert_eq!(
offset_of!(VMDynamicFunctionImportContext<usize>, address),
usize::from(offsets.vmdynamicfunction_import_context_address())
);
assert_eq!(
offset_of!(VMDynamicFunctionImportContext<usize>, ctx),
usize::from(offsets.vmdynamicfunction_import_context_ctx())
);
}
}
/// A placeholder byte-sized type which is just used to provide some amount of type /// A placeholder byte-sized type which is just used to provide some amount of type
/// safety when dealing with pointers to JIT-compiled function bodies. Note that it's /// safety when dealing with pointers to JIT-compiled function bodies. Note that it's
/// deliberately not Copy, as we shouldn't be carelessly copying function body bytes /// deliberately not Copy, as we shouldn't be carelessly copying function body bytes
@@ -64,6 +110,26 @@ mod test_vmfunction_body {
} }
} }
/// A function kind.
#[derive(Debug, Copy, Clone, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
/// A function is static when it's address matches the signature:
/// (vmctx, vmctx, arg1, arg2...) -> (result1, result2, ...)
///
/// This is the default for functions that are defined:
/// 1. In the Host, natively
/// 2. In the WebAssembly file
Static,
/// A function is dynamic when it's address matches the signature:
/// (ctx, &[Type]) -> Vec<Type>
///
/// This is the default for functions that are defined:
/// 1. In the Host, dynamically
Dynamic,
}
/// The fields compiled code needs to access to utilize a WebAssembly table /// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance. /// imported from another instance.
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]

View File

@@ -92,6 +92,30 @@ impl VMOffsets {
} }
} }
/// Offsets for [`VMDynamicFunctionImportContext`].
///
/// [`VMDynamicFunctionImportContext`]: crate::vmcontext::VMDynamicFunctionImportContext
impl VMOffsets {
/// The offset of the `address` field.
#[allow(clippy::erasing_op)]
pub fn vmdynamicfunction_import_context_address(&self) -> u8 {
0 * self.pointer_size
}
/// The offset of the `ctx` field.
#[allow(clippy::identity_op)]
pub fn vmdynamicfunction_import_context_ctx(&self) -> u8 {
1 * self.pointer_size
}
/// Return the size of [`VMDynamicFunctionImportContext`].
///
/// [`VMDynamicFunctionImportContext`]: crate::vmcontext::VMDynamicFunctionImportContext
pub fn size_of_vmdynamicfunction_import_context(&self) -> u8 {
2 * self.pointer_size
}
}
/// Offsets for `*const VMFunctionBody`. /// Offsets for `*const VMFunctionBody`.
impl VMOffsets { impl VMOffsets {
/// The size of the `current_elements` field. /// The size of the `current_elements` field.

View File

@@ -55,6 +55,7 @@ entity_impl!(MemoryIndex);
/// Index type of a signature (imported or local) inside the WebAssembly module. /// Index type of a signature (imported or local) inside the WebAssembly module.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct SignatureIndex(u32); pub struct SignatureIndex(u32);
entity_impl!(SignatureIndex); entity_impl!(SignatureIndex);

View File

@@ -261,39 +261,26 @@ pub struct FunctionBody(*mut u8);
/// Represents a function that can be used by WebAssembly. /// Represents a function that can be used by WebAssembly.
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Func<Args = (), Rets = (), Env = ()> { pub struct Func<Args = (), Rets = ()> {
address: *const FunctionBody, address: *const FunctionBody,
env: Option<*mut Env>,
_phantom: PhantomData<(Args, Rets)>, _phantom: PhantomData<(Args, Rets)>,
} }
unsafe impl<Args, Rets> Send for Func<Args, Rets> {} unsafe impl<Args, Rets> Send for Func<Args, Rets> {}
impl<Args, Rets, Env> Func<Args, Rets, Env> impl<Args, Rets> Func<Args, Rets>
where where
Args: WasmTypeList, Args: WasmTypeList,
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized,
{ {
/// Creates a new `Func`. /// Creates a new `Func`.
pub fn new<F>(func: F) -> Self pub fn new<F, T, E>(func: F) -> Self
where where
F: HostFunction<Args, Rets, WithoutEnv, Env>, F: HostFunction<Args, Rets, T, E>,
T: HostFunctionKind,
E: Sized,
{ {
Self { Self {
env: None,
address: func.to_raw(),
_phantom: PhantomData,
}
}
/// Creates a new `Func` with a given `env`.
pub fn new_env<F>(env: &mut Env, func: F) -> Self
where
F: HostFunction<Args, Rets, WithEnv, Env>,
{
Self {
env: Some(env),
address: func.to_raw(), address: func.to_raw(),
_phantom: PhantomData, _phantom: PhantomData,
} }
@@ -304,11 +291,6 @@ where
FunctionType::new(Args::wasm_types(), Rets::wasm_types()) FunctionType::new(Args::wasm_types(), Rets::wasm_types())
} }
/// Get the type of the Func
pub fn env(&self) -> Option<*mut Env> {
self.env
}
/// Get the address of the Func /// Get the address of the Func
pub fn address(&self) -> *const FunctionBody { pub fn address(&self) -> *const FunctionBody {
self.address self.address