Merge branch 'master' into native-engine

# Conflicts:
#	Cargo.lock
#	lib/api/src/externals.rs
#	lib/compiler/src/relocation.rs
#	lib/engine-jit/src/link.rs
#	lib/engine/src/engine.rs
This commit is contained in:
Syrus
2020-05-18 17:26:36 -07:00
116 changed files with 15220 additions and 1274 deletions

View File

@@ -2,19 +2,23 @@ on: [push]
name: build name: build
env:
RUST_BACKTRACE: 1
jobs: jobs:
lint: lint:
name: Code lint name: Code lint
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Install Rust nightly - name: Install Rust stable
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
with: with:
toolchain: nightly-2019-12-19 profile: minimal
toolchain: 1.42.0
override: true override: true
components: rustfmt, clippy components: rustfmt, clippy
- run: cargo fmt --all -- --check - run: make lint
test: test:
name: Test in ${{ matrix.build }} name: Test in ${{ matrix.build }}
@@ -41,8 +45,6 @@ jobs:
toolchain: ${{ matrix.rust }} toolchain: ${{ matrix.rust }}
override: true override: true
- run: cargo test --release - run: cargo test --release
env:
RUST_BACKTRACE: 1
- name: Build and Test C API - name: Build and Test C API
run: | run: |
make capi make capi
@@ -51,5 +53,3 @@ jobs:
- name: Build C API on Windows - name: Build C API on Windows
run: make capi run: make capi
if: matrix.os == 'windows-latest' if: matrix.os == 'windows-latest'
env:
RUST_BACKTRACE: 1

55
Cargo.lock generated
View File

@@ -399,6 +399,31 @@ version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ba6eb47c2131e784a38b726eb54c1e1484904f013e576a25354d0124161af6" checksum = "52ba6eb47c2131e784a38b726eb54c1e1484904f013e576a25354d0124161af6"
[[package]]
name = "dynasm"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42a814e1edeb85dd2a3c6fc0d6bf76d02ca5695d438c70ecee3d90774f3259c5"
dependencies = [
"bitflags",
"byteorder",
"lazy_static",
"owning_ref",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "dynasmrt"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a393aaeb4441a48bcf47b5b6155971f82cc1eb77e22855403ccc0415ac8328d"
dependencies = [
"byteorder",
"memmap",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.5.3" version = "1.5.3"
@@ -595,7 +620,7 @@ dependencies = [
[[package]] [[package]]
name = "inkwell" name = "inkwell"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/theDan64/inkwell?rev=af1846fd8aea530cef9a59170715e5c6c64346f6#af1846fd8aea530cef9a59170715e5c6c64346f6" source = "git+https://github.com/theDan64/inkwell?rev=1bfecc0a095d7ffdfa20a64630864f0297349508#1bfecc0a095d7ffdfa20a64630864f0297349508"
dependencies = [ dependencies = [
"either", "either",
"inkwell_internals", "inkwell_internals",
@@ -609,7 +634,7 @@ dependencies = [
[[package]] [[package]]
name = "inkwell_internals" name = "inkwell_internals"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/theDan64/inkwell?rev=af1846fd8aea530cef9a59170715e5c6c64346f6#af1846fd8aea530cef9a59170715e5c6c64346f6" source = "git+https://github.com/theDan64/inkwell?rev=1bfecc0a095d7ffdfa20a64630864f0297349508#1bfecc0a095d7ffdfa20a64630864f0297349508"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -667,9 +692,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.69" version = "0.2.70"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005" checksum = "3baa92041a6fec78c687fa0cc2b3fae8884f743d672cf551bed1d6dac6988d0f"
[[package]] [[package]]
name = "libloading" name = "libloading"
@@ -838,6 +863,15 @@ dependencies = [
"sdl2", "sdl2",
] ]
[[package]]
name = "owning_ref"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ff55baddef9e4ad00f88b6c743a2a8062d4c6ade126c2a528644b8e444d52ce"
dependencies = [
"stable_deref_trait",
]
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.10.2" version = "0.10.2"
@@ -1298,6 +1332,12 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7cb5678e1615754284ec264d9bb5b4c27d2018577fd90ac0ceb578591ed5ee4" checksum = "c7cb5678e1615754284ec264d9bb5b4c27d2018577fd90ac0ceb578591ed5ee4"
[[package]]
name = "stable_deref_trait"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
[[package]] [[package]]
name = "string-interner" name = "string-interner"
version = "0.7.1" version = "0.7.1"
@@ -1403,10 +1443,12 @@ dependencies = [
name = "test-utils" name = "test-utils"
version = "0.16.2" version = "0.16.2"
dependencies = [ dependencies = [
"wasmer",
"wasmer-compiler", "wasmer-compiler",
"wasmer-compiler-cranelift", "wasmer-compiler-cranelift",
"wasmer-compiler-llvm", "wasmer-compiler-llvm",
"wasmer-compiler-singlepass", "wasmer-compiler-singlepass",
"wasmer-engine-jit",
] ]
[[package]] [[package]]
@@ -1694,10 +1736,15 @@ dependencies = [
name = "wasmer-compiler-singlepass" name = "wasmer-compiler-singlepass"
version = "0.16.2" version = "0.16.2"
dependencies = [ dependencies = [
"byteorder",
"dynasm",
"dynasmrt",
"hashbrown", "hashbrown",
"lazy_static",
"more-asserts", "more-asserts",
"rayon", "rayon",
"serde", "serde",
"smallvec",
"wasm-common", "wasm-common",
"wasmer-compiler", "wasmer-compiler",
"wasmer-runtime", "wasmer-runtime",

View File

@@ -7,6 +7,11 @@ test:
doc: doc:
cargo doc --all-features --document-private-items cargo doc --all-features --document-private-items
RUSTFLAGS := "-D dead-code -D nonstandard-style -D unused-imports -D unused-mut -D unused-variables -D unused-unsafe -D unreachable-patterns -D bad-style -D improper-ctypes -D unused-allocation -D unused-comparisons -D while-true -D unconditional-recursion -D bare-trait-objects -D mutable-borrow-reservation-conflict" # TODO: add `-D missing-docs`
lint:
cargo fmt --all -- --check
RUSTFLAGS=${RUSTFLAGS} cargo clippy
capi-singlepass: capi-singlepass:
cargo build --manifest-path lib/c-api/Cargo.toml --release \ cargo build --manifest-path lib/c-api/Cargo.toml --release \
--no-default-features --features singlepass-backend,wasi --no-default-features --features singlepass-backend,wasi

6
bors.toml Normal file
View File

@@ -0,0 +1,6 @@
status = [
"wasmerio.wasmer"
]
required_approvals = 1
timeout_sec = 7200
delete_merged_branches = true

View File

@@ -13,10 +13,6 @@ use test_generator::{
with_features, with_test_module, Testsuite, with_features, with_test_module, Testsuite,
}; };
fn is_truthy_env(name: &str) -> bool {
env::var(name).map(|n| n == "1").unwrap_or_default()
}
fn main() -> anyhow::Result<()> { fn main() -> anyhow::Result<()> {
println!("cargo:rerun-if-changed=build.rs"); println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=tests/ignores.txt"); println!("cargo:rerun-if-changed=tests/ignores.txt");
@@ -30,7 +26,7 @@ fn main() -> anyhow::Result<()> {
let mut spectests = Testsuite { let mut spectests = Testsuite {
buffer: String::new(), buffer: String::new(),
path: vec![], path: vec![],
ignores: ignores.clone(), ignores,
}; };
let backends = vec!["singlepass", "cranelift", "llvm"]; let backends = vec!["singlepass", "cranelift", "llvm"];

View File

@@ -29,7 +29,7 @@ winapi = "0.3.8"
[dev-dependencies] [dev-dependencies]
# for the binary wasmer.rs # for the binary wasmer.rs
libc = "0.2" libc = { version = "0.2.70", default-features = false }
wat = "1.0.15" wat = "1.0.15"
tempfile = "3.1" tempfile = "3.1"
anyhow = "1.0.28" anyhow = "1.0.28"
@@ -41,7 +41,7 @@ maintenance = { status = "actively-developed" }
default = ["wat", "cranelift", "jit"] default = ["wat", "cranelift", "jit"]
compiler = ["wasmer-engine-jit/compiler"] compiler = ["wasmer-engine-jit/compiler"]
engine = [] engine = []
jit = ["wasmer-engine-jit"] jit = ["wasmer-engine-jit", "engine"]
singlepass = [ singlepass = [
"wasmer-compiler-singlepass", "wasmer-compiler-singlepass",
"compiler", "compiler",

View File

@@ -60,6 +60,11 @@ impl Exports {
self.map.len() self.map.len()
} }
/// Return whether or not there are no exports
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Insert a new export into this `Exports` map. /// Insert a new export into this `Exports` map.
pub fn insert<S, E>(&mut self, name: S, value: E) pub fn insert<S, E>(&mut self, name: S, value: E)
where where
@@ -84,7 +89,7 @@ impl Exports {
/// type checking manually, please use `get_extern`. /// type checking manually, please use `get_extern`.
pub fn get<'a, T: Exportable<'a>>(&'a self, name: &str) -> Result<&T, ExportError> { pub fn get<'a, T: Exportable<'a>>(&'a self, name: &str) -> Result<&T, ExportError> {
match self.map.get(name) { match self.map.get(name) {
None => return Err(ExportError::Missing(name.to_string())), None => Err(ExportError::Missing(name.to_string())),
Some(extern_) => T::get_self_from_extern(extern_), Some(extern_) => T::get_self_from_extern(extern_),
} }
} }
@@ -114,14 +119,12 @@ impl Exports {
self.map.get(name) self.map.get(name)
} }
/// Returns true if the `Exports` contains the given name. /// Returns true if the `Exports` contains the given export name.
pub fn contains<S>(&mut self, name: S) -> bool pub fn contains<S>(&self, name: S) -> bool
where where
S: Into<String>, S: Into<String>,
{ {
Arc::get_mut(&mut self.map) self.map.contains_key(&name.into())
.unwrap()
.contains_key(&name.into())
} }
} }

View File

@@ -7,12 +7,14 @@ use crate::RuntimeError;
use crate::{ExternType, FunctionType, GlobalType, MemoryType, TableType, ValType}; use crate::{ExternType, FunctionType, GlobalType, MemoryType, TableType, ValType};
use std::cmp::max; use std::cmp::max;
use std::slice; use std::slice;
use wasm_common::{Bytes, HostFunction, Pages, ValueType, WasmTypeList, WithEnv, WithoutEnv}; use wasm_common::{
use wasmer_engine::Engine as _; HostFunction, Pages, SignatureIndex, ValueType, WasmTypeList, WithEnv, WithoutEnv,
};
use wasmer_runtime::{ use wasmer_runtime::{
wasmer_call_trampoline, Export, ExportFunction, ExportGlobal, ExportMemory, ExportTable, wasmer_call_trampoline, Export, ExportFunction, ExportGlobal, ExportMemory, ExportTable,
LinearMemory, Table as RuntimeTable, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, InstanceHandle, LinearMemory, MemoryError, Table as RuntimeTable, VMCallerCheckedAnyfunc,
VMGlobalDefinition, VMMemoryDefinition, VMTrampoline, VMContext, VMDynamicFunctionImportContext, VMFunctionBody, VMFunctionKind, VMGlobalDefinition,
VMMemoryDefinition, VMTrampoline,
}; };
#[derive(Clone)] #[derive(Clone)]
@@ -26,10 +28,10 @@ pub enum Extern {
impl Extern { impl Extern {
pub fn ty(&self) -> ExternType { pub fn ty(&self) -> ExternType {
match self { match self {
Extern::Function(ft) => ExternType::Function(ft.ty().clone()), Extern::Function(ft) => ExternType::Function(ft.ty()),
Extern::Memory(ft) => ExternType::Memory(ft.ty().clone()), Extern::Memory(ft) => ExternType::Memory(*ft.ty()),
Extern::Table(tt) => ExternType::Table(tt.ty().clone()), Extern::Table(tt) => ExternType::Table(*tt.ty()),
Extern::Global(gt) => ExternType::Global(gt.ty().clone()), Extern::Global(gt) => ExternType::Global(*gt.ty()),
} }
} }
@@ -162,7 +164,9 @@ impl Global {
pub fn set(&self, val: Val) -> Result<(), RuntimeError> { pub fn set(&self, val: Val) -> Result<(), RuntimeError> {
if self.ty().mutability != Mutability::Var { if self.ty().mutability != Mutability::Var {
return Err(RuntimeError::new(format!("immutable global cannot be set"))); return Err(RuntimeError::new(
"immutable global cannot be set".to_string(),
));
} }
if val.ty() != self.ty().ty { if val.ty() != self.ty().ty {
return Err(RuntimeError::new(format!( return Err(RuntimeError::new(format!(
@@ -310,7 +314,7 @@ impl Table {
src_index, src_index,
len, len,
) )
.map_err(|e| RuntimeError::from_trap(e))?; .map_err(RuntimeError::from_trap)?;
Ok(()) Ok(())
} }
@@ -344,25 +348,25 @@ pub struct Memory {
} }
impl Memory { impl Memory {
pub fn new(store: &Store, ty: MemoryType) -> Memory { pub fn new(store: &Store, ty: MemoryType) -> Result<Memory, MemoryError> {
let tunables = store.engine().tunables(); let tunables = store.engine().tunables();
let memory_plan = tunables.memory_plan(ty); let memory_plan = tunables.memory_plan(ty);
let memory = tunables.create_memory(memory_plan).unwrap(); let memory = tunables.create_memory(memory_plan)?;
let definition = memory.vmmemory(); let definition = memory.vmmemory();
Memory { Ok(Memory {
store: store.clone(), store: store.clone(),
owned_by_store: true, owned_by_store: true,
exported: ExportMemory { exported: ExportMemory {
from: Box::leak(Box::new(memory)), from: Box::leak(Box::new(memory)),
definition: Box::leak(Box::new(definition)), definition: Box::leak(Box::new(definition)),
}, },
} })
} }
fn definition(&self) -> &VMMemoryDefinition { fn definition(&self) -> VMMemoryDefinition {
unsafe { &*self.exported.definition } self.memory().vmmemory()
} }
pub fn ty(&self) -> &MemoryType { pub fn ty(&self) -> &MemoryType {
@@ -377,6 +381,9 @@ impl Memory {
self.data_unchecked_mut() self.data_unchecked_mut()
} }
/// TODO: document this function, it's trivial to cause UB/break soundness with this
/// method.
#[allow(clippy::mut_from_ref)]
pub unsafe fn data_unchecked_mut(&self) -> &mut [u8] { pub unsafe fn data_unchecked_mut(&self) -> &mut [u8] {
let definition = self.definition(); let definition = self.definition();
slice::from_raw_parts_mut(definition.base, definition.current_length) slice::from_raw_parts_mut(definition.base, definition.current_length)
@@ -391,14 +398,14 @@ impl Memory {
} }
pub fn size(&self) -> Pages { pub fn size(&self) -> Pages {
Bytes(self.data_size()).into() self.memory().size()
} }
fn memory(&self) -> &LinearMemory { fn memory(&self) -> &LinearMemory {
unsafe { &*self.exported.from } unsafe { &*self.exported.from }
} }
pub fn grow(&self, delta: Pages) -> Option<Pages> { pub fn grow(&self, delta: Pages) -> Result<Pages, MemoryError> {
self.memory().grow(delta) self.memory().grow(delta)
} }
@@ -445,7 +452,7 @@ impl Memory {
Memory { Memory {
store: store.clone(), store: store.clone(),
owned_by_store: false, owned_by_store: false,
exported: wasmer_export.clone(), exported: wasmer_export,
} }
} }
} }
@@ -473,32 +480,26 @@ impl Drop for Memory {
/// A function defined in the Wasm module /// A function defined in the Wasm module
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct WasmFunc { pub struct WasmFunctionDefinition {
// The trampoline to do the call // The trampoline to do the call
trampoline: VMTrampoline, trampoline: VMTrampoline,
} }
/// A function defined in the Host
#[derive(Clone, PartialEq)]
pub struct HostFunc {
// func: wasm_common::Func<Args, Rets>,
}
/// The inner helper /// The inner helper
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub enum InnerFunc { pub enum FunctionDefinition {
/// A function defined in the Wasm side /// A function defined in the Wasm side
Wasm(WasmFunc), Wasm(WasmFunctionDefinition),
/// A function defined in the Host side /// A function defined in the Host side
Host(HostFunc), Host,
} }
/// A WebAssembly `function`. /// A WebAssembly `function`.
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct Function { pub struct Function {
store: Store, store: Store,
definition: FunctionDefinition,
// If the Function is owned by the Store, not the instance // If the Function is owned by the Store, not the instance
inner: InnerFunc,
owned_by_store: bool, owned_by_store: bool,
exported: ExportFunction, exported: ExportFunction,
} }
@@ -515,21 +516,71 @@ impl Function {
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized, Env: Sized,
{ {
let func: wasm_common::Func<Args, Rets, Env> = wasm_common::Func::new(func); let func: wasm_common::Func<Args, Rets> = wasm_common::Func::new(func);
let address = func.address() as *const VMFunctionBody; let address = func.address() as *const VMFunctionBody;
let vmctx = (func.env().unwrap_or(std::ptr::null_mut()) as *mut _) as *mut VMContext; let vmctx = std::ptr::null_mut() as *mut _ as *mut VMContext;
let func_type = func.ty(); let func_type = func.ty();
let signature = store.engine().register_signature(&func_type); let signature = store.engine().register_signature(&func_type);
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: true, owned_by_store: true,
inner: InnerFunc::Host(HostFunc { definition: FunctionDefinition::Host,
// func
}),
exported: ExportFunction { exported: ExportFunction {
address, address,
vmctx, vmctx,
signature, signature,
kind: VMFunctionKind::Static,
},
}
}
#[allow(clippy::cast_ptr_alignment)]
pub fn new_dynamic<F>(store: &Store, ty: &FunctionType, func: F) -> Self
where
F: Fn(&[Val]) -> Result<Vec<Val>, RuntimeError> + 'static,
{
let dynamic_ctx =
VMDynamicFunctionImportContext::from_context(VMDynamicFunctionWithoutEnv {
func: Box::new(func),
});
let address = std::ptr::null() as *const () as *const VMFunctionBody;
let vmctx = Box::leak(Box::new(dynamic_ctx)) as *mut _ as *mut VMContext;
let signature = store.engine().register_signature(&ty);
Self {
store: store.clone(),
owned_by_store: true,
definition: FunctionDefinition::Host,
exported: ExportFunction {
address,
kind: VMFunctionKind::Dynamic,
vmctx,
signature,
},
}
}
#[allow(clippy::cast_ptr_alignment)]
pub fn new_dynamic_env<F, Env>(store: &Store, ty: &FunctionType, env: &mut Env, func: F) -> Self
where
F: Fn(&mut Env, &[Val]) -> Result<Vec<Val>, RuntimeError> + 'static,
Env: Sized,
{
let dynamic_ctx = VMDynamicFunctionImportContext::from_context(VMDynamicFunctionWithEnv {
env,
func: Box::new(func),
});
let address = std::ptr::null() as *const () as *const VMFunctionBody;
let vmctx = Box::leak(Box::new(dynamic_ctx)) as *mut _ as *mut VMContext;
let signature = store.engine().register_signature(&ty);
Self {
store: store.clone(),
owned_by_store: true,
definition: FunctionDefinition::Host,
exported: ExportFunction {
address,
kind: VMFunctionKind::Dynamic,
vmctx,
signature,
}, },
} }
} }
@@ -546,19 +597,23 @@ impl Function {
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized, Env: Sized,
{ {
let func: wasm_common::Func<Args, Rets, Env> = wasm_common::Func::new_env(env, func); let func: wasm_common::Func<Args, Rets> = wasm_common::Func::new(func);
let address = func.address() as *const VMFunctionBody; let address = func.address() as *const VMFunctionBody;
let vmctx = (func.env().unwrap_or(std::ptr::null_mut()) as *mut _) as *mut VMContext; // TODO: We need to refactor the Function context.
// Right now is structured as it's always a `VMContext`. However, only
// Wasm-defined functions have a `VMContext`.
// In the case of Host-defined functions `VMContext` is whatever environment
// the user want to attach to the function.
let vmctx = env as *mut _ as *mut VMContext;
let func_type = func.ty(); let func_type = func.ty();
let signature = store.engine().register_signature(&func_type); let signature = store.engine().register_signature(&func_type);
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: true, owned_by_store: true,
inner: InnerFunc::Host(HostFunc { definition: FunctionDefinition::Host,
// func
}),
exported: ExportFunction { exported: ExportFunction {
address, address,
kind: VMFunctionKind::Static,
vmctx, vmctx,
signature, signature,
}, },
@@ -580,7 +635,7 @@ impl Function {
fn call_wasm( fn call_wasm(
&self, &self,
func: &WasmFunc, func: &WasmFunctionDefinition,
params: &[Val], params: &[Val],
results: &mut [Val], results: &mut [Val],
) -> Result<(), RuntimeError> { ) -> Result<(), RuntimeError> {
@@ -642,10 +697,10 @@ impl Function {
} }
// Load the return values out of `values_vec`. // Load the return values out of `values_vec`.
for (index, value_type) in signature.results().iter().enumerate() { for (index, &value_type) in signature.results().iter().enumerate() {
unsafe { unsafe {
let ptr = values_vec.as_ptr().add(index); let ptr = values_vec.as_ptr().add(index);
results[index] = Val::read_value_from(ptr, value_type.clone()); results[index] = Val::read_value_from(ptr, value_type);
} }
} }
@@ -671,8 +726,8 @@ impl Function {
/// call the trampoline. /// call the trampoline.
pub fn call(&self, params: &[Val]) -> Result<Box<[Val]>, RuntimeError> { pub fn call(&self, params: &[Val]) -> Result<Box<[Val]>, RuntimeError> {
let mut results = vec![Val::null(); self.result_arity()]; let mut results = vec![Val::null(); self.result_arity()];
match &self.inner { match &self.definition {
InnerFunc::Wasm(wasm) => { FunctionDefinition::Wasm(wasm) => {
self.call_wasm(&wasm, params, &mut results)?; self.call_wasm(&wasm, params, &mut results)?;
} }
_ => {} // _ => unimplemented!("The host is unimplemented"), _ => {} // _ => unimplemented!("The host is unimplemented"),
@@ -683,12 +738,12 @@ impl Function {
pub(crate) fn from_export(store: &Store, wasmer_export: ExportFunction) -> Self { pub(crate) fn from_export(store: &Store, wasmer_export: ExportFunction) -> Self {
let trampoline = store let trampoline = store
.engine() .engine()
.trampoline(wasmer_export.signature) .function_call_trampoline(wasmer_export.signature)
.expect("Can't get trampoline for the function"); .expect("Can't get call trampoline for the function");
Self { Self {
store: store.clone(), store: store.clone(),
owned_by_store: false, owned_by_store: false,
inner: InnerFunc::Wasm(WasmFunc { trampoline }), definition: FunctionDefinition::Wasm(WasmFunctionDefinition { trampoline }),
exported: wasmer_export, exported: wasmer_export,
} }
} }
@@ -719,3 +774,113 @@ impl std::fmt::Debug for Function {
Ok(()) Ok(())
} }
} }
/// This trait is one that all dynamic funcitons must fulfill.
trait VMDynamicFunction {
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError>;
}
struct VMDynamicFunctionWithoutEnv {
func: Box<dyn Fn(&[Val]) -> Result<Vec<Val>, RuntimeError> + 'static>,
}
impl VMDynamicFunction for VMDynamicFunctionWithoutEnv {
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError> {
(*self.func)(&args)
}
}
struct VMDynamicFunctionWithEnv<Env>
where
Env: Sized,
{
func: Box<dyn Fn(&mut Env, &[Val]) -> Result<Vec<Val>, RuntimeError> + 'static>,
env: *mut Env,
}
impl<Env> VMDynamicFunction for VMDynamicFunctionWithEnv<Env>
where
Env: Sized,
{
fn call(&self, args: &[Val]) -> Result<Vec<Val>, RuntimeError> {
unsafe { (*self.func)(&mut *self.env, &args) }
}
}
trait VMDynamicFunctionImportCall<T: VMDynamicFunction> {
fn from_context(ctx: T) -> Self;
fn address_ptr() -> *const VMFunctionBody;
unsafe fn func_wrapper(
&self,
caller_vmctx: *mut VMContext,
sig_index: SignatureIndex,
values_vec: *mut i128,
);
}
impl<T: VMDynamicFunction> VMDynamicFunctionImportCall<T> for VMDynamicFunctionImportContext<T> {
fn from_context(ctx: T) -> Self {
Self {
address: Self::address_ptr(),
ctx,
}
}
fn address_ptr() -> *const VMFunctionBody {
Self::func_wrapper as *const () as *const VMFunctionBody
}
// This function wraps our func, to make it compatible with the
// reverse trampoline signature
unsafe fn func_wrapper(
// Note: we use the trick that the first param to this function is the `VMDynamicFunctionImportContext`
// itself, so rather than doing `dynamic_ctx: &VMDynamicFunctionImportContext<T>`, we simplify it a bit
&self,
caller_vmctx: *mut VMContext,
sig_index: SignatureIndex,
values_vec: *mut i128,
) {
use std::panic::{self, AssertUnwindSafe};
let result = panic::catch_unwind(AssertUnwindSafe(|| {
// This is actually safe, since right now the function signature
// receives two contexts:
// 1. `vmctx`: the context associated to where the function is defined.
// It will be `VMContext` in case is defined in Wasm, and a custom
// `Env` in case is host defined.
// 2. `caller_vmctx`: the context associated to whoever is calling that function.
//
// Because this code will only be reached when calling from wasm to host, we
// can assure the callee_vmctx is indeed a VMContext, and hence is completely
// safe to get a handle from it.
let handle = InstanceHandle::from_vmctx(caller_vmctx);
let module = handle.module_ref();
let func_ty = &module.signatures[sig_index];
let mut args = Vec::with_capacity(func_ty.params().len());
for (i, ty) in func_ty.params().iter().enumerate() {
args.push(Val::read_value_from(values_vec.add(i), *ty));
}
let returns = self.ctx.call(&args)?;
// We need to dynamically check that the returns
// match the expected types, as well as expected length.
let return_types = returns.iter().map(|ret| ret.ty()).collect::<Vec<_>>();
if return_types != func_ty.results() {
return Err(RuntimeError::new(format!(
"Dynamic function returned wrong signature. Expected {:?} but got {:?}",
func_ty.results(),
return_types
)));
}
for (i, ret) in returns.iter().enumerate() {
ret.write_value_to(values_vec.add(i));
}
Ok(())
}));
match result {
Ok(Ok(())) => {}
Ok(Err(trap)) => wasmer_runtime::raise_user_trap(Box::new(trap)),
Err(panic) => wasmer_runtime::resume_panic(panic),
}
}
}

View File

@@ -61,8 +61,8 @@ impl Instance {
.map(|export| { .map(|export| {
let name = export.name().to_string(); let name = export.name().to_string();
let export = handle.lookup(&name).expect("export"); let export = handle.lookup(&name).expect("export");
let extern_ = Extern::from_export(store, export.clone()); let extern_ = Extern::from_export(store, export);
(name.to_string(), extern_) (name, extern_)
}) })
.collect::<Exports>(); .collect::<Exports>();

View File

@@ -34,6 +34,7 @@ pub use wasmer_compiler::{Features, Target};
pub use wasmer_engine::{ pub use wasmer_engine::{
DeserializeError, Engine, InstantiationError, LinkError, RuntimeError, SerializeError, DeserializeError, Engine, InstantiationError, LinkError, RuntimeError, SerializeError,
}; };
pub use wasmer_runtime::MemoryError;
// The compilers are mutually exclusive // The compilers are mutually exclusive
#[cfg(any( #[cfg(any(

View File

@@ -6,8 +6,10 @@ use std::io;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use thiserror::Error; use thiserror::Error;
use wasmer_compiler::{CompileError, WasmError}; use wasmer_compiler::CompileError;
use wasmer_engine::{CompiledModule, DeserializeError, Engine, Resolver, SerializeError}; #[cfg(feature = "wat")]
use wasmer_compiler::WasmError;
use wasmer_engine::{CompiledModule, DeserializeError, Resolver, SerializeError};
use wasmer_runtime::{ExportsIterator, ImportsIterator, InstanceHandle, Module as ModuleInfo}; use wasmer_runtime::{ExportsIterator, ImportsIterator, InstanceHandle, Module as ModuleInfo};
#[derive(Error, Debug)] #[derive(Error, Debug)]
@@ -74,6 +76,7 @@ impl Module {
/// let bytes: Vec<u8> = vec![]; /// let bytes: Vec<u8> = vec![];
/// let module = Module::new(&store, bytes)?; /// let module = Module::new(&store, bytes)?;
/// ``` /// ```
#[allow(unreachable_code)]
pub fn new(store: &Store, bytes: impl AsRef<[u8]>) -> Result<Module, CompileError> { pub fn new(store: &Store, bytes: impl AsRef<[u8]>) -> Result<Module, CompileError> {
#[cfg(feature = "wat")] #[cfg(feature = "wat")]
{ {
@@ -206,7 +209,7 @@ impl Module {
Ok(Self::from_compiled_module(store, compiled)) Ok(Self::from_compiled_module(store, compiled))
} }
fn from_compiled_module(store: &Store, compiled: Arc<CompiledModule>) -> Self { fn from_compiled_module(store: &Store, compiled: Arc<dyn CompiledModule>) -> Self {
Module { Module {
store: store.clone(), store: store.clone(),
compiled, compiled,

View File

@@ -272,7 +272,7 @@ mod test {
// create a memory // create a memory
let store = Store::default(); let store = Store::default();
let memory_descriptor = MemoryType::new(1, Some(1), false); let memory_descriptor = MemoryType::new(1, Some(1), false);
let memory = Memory::new(&store, memory_descriptor); let memory = Memory::new(&store, memory_descriptor).unwrap();
// test that basic access works and that len = 0 works, but oob does not // test that basic access works and that len = 0 works, but oob does not
let start_wasm_ptr: WasmPtr<u8> = WasmPtr::new(0); let start_wasm_ptr: WasmPtr<u8> = WasmPtr::new(0);

View File

@@ -1,7 +1,9 @@
#[cfg(all(feature = "compiler", feature = "engine"))]
use crate::tunables::Tunables; use crate::tunables::Tunables;
use std::sync::Arc; #[cfg(all(feature = "compiler", feature = "engine"))]
#[cfg(feature = "compiler")]
use wasmer_compiler::CompilerConfig; use wasmer_compiler::CompilerConfig;
use std::sync::Arc;
use wasmer_engine::Engine; use wasmer_engine::Engine;
#[derive(Clone)] #[derive(Clone)]

View File

@@ -3,6 +3,7 @@ use std::cmp::min;
use target_lexicon::{OperatingSystem, PointerWidth, Triple, HOST}; use target_lexicon::{OperatingSystem, PointerWidth, Triple, HOST};
use wasm_common::{MemoryType, Pages, TableType}; use wasm_common::{MemoryType, Pages, TableType};
use wasmer_engine::Tunables as BaseTunables; use wasmer_engine::Tunables as BaseTunables;
use wasmer_runtime::MemoryError;
use wasmer_runtime::{LinearMemory, Table}; use wasmer_runtime::{LinearMemory, Table};
use wasmer_runtime::{MemoryPlan, MemoryStyle, TablePlan, TableStyle}; use wasmer_runtime::{MemoryPlan, MemoryStyle, TablePlan, TableStyle};
@@ -93,7 +94,7 @@ impl BaseTunables for Tunables {
} }
/// Create a memory given a memory type /// Create a memory given a memory type
fn create_memory(&self, plan: MemoryPlan) -> Result<LinearMemory, String> { fn create_memory(&self, plan: MemoryPlan) -> Result<LinearMemory, MemoryError> {
LinearMemory::new(&plan) LinearMemory::new(&plan)
} }

View File

@@ -64,6 +64,9 @@ impl ValAnyFunc for Val {
let export = wasmer_runtime::ExportFunction { let export = wasmer_runtime::ExportFunction {
address: item.func_ptr, address: item.func_ptr,
signature: item.type_index, signature: item.type_index,
// All functions in tables are already Static (as dynamic functions
// are converted to use the trampolines with static signatures).
kind: wasmer_runtime::VMFunctionKind::Static,
vmctx: item.vmctx, vmctx: item.vmctx,
}; };
let f = Function::from_export(store, export); let f = Function::from_export(store, export);

View File

@@ -16,7 +16,7 @@ crate-type = ["cdylib", "staticlib"]
[dependencies] [dependencies]
lazy_static = "1" lazy_static = "1"
libc = "0.2.60" libc = { version = "0.2.70", default-features = false }
# for generating code in the same way thot the wasm-c-api does # for generating code in the same way thot the wasm-c-api does
# Commented out for now until we can find a solution to the exported function problem # Commented out for now until we can find a solution to the exported function problem
# wasmer-wasm-c-api = { version = "0.16.2", path = "crates/wasm-c-api" } # wasmer-wasm-c-api = { version = "0.16.2", path = "crates/wasm-c-api" }

View File

@@ -1,12 +1,11 @@
//! Read runtime errors. //! Read runtime errors.
use libc::{c_char, c_int}; use libc::{c_char, c_int};
use std::{ use std::cell::RefCell;
cell::RefCell, use std::error::Error;
error::Error, use std::fmt::{self, Display, Formatter};
fmt::{self, Display, Formatter}, use std::ptr::{self, NonNull};
ptr, slice, use std::slice;
};
thread_local! { thread_local! {
static LAST_ERROR: RefCell<Option<Box<dyn Error>>> = RefCell::new(None); static LAST_ERROR: RefCell<Option<Box<dyn Error>>> = RefCell::new(None);
@@ -66,11 +65,16 @@ pub extern "C" fn wasmer_last_error_length() -> c_int {
/// } /// }
/// ``` /// ```
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_last_error_message(buffer: *mut c_char, length: c_int) -> c_int { pub unsafe extern "C" fn wasmer_last_error_message(
if buffer.is_null() { buffer: Option<NonNull<c_char>>,
length: c_int,
) -> c_int {
let buffer = if let Some(buffer_inner) = buffer {
buffer_inner
} else {
// buffer pointer is null // buffer pointer is null
return -1; return -1;
} };
let error_message = match take_last_error() { let error_message = match take_last_error() {
Some(err) => err.to_string(), Some(err) => err.to_string(),
@@ -84,7 +88,7 @@ pub unsafe extern "C" fn wasmer_last_error_message(buffer: *mut c_char, length:
return -1; return -1;
} }
let buffer = slice::from_raw_parts_mut(buffer as *mut u8, length); let buffer = slice::from_raw_parts_mut(buffer.cast::<u8>().as_ptr(), length);
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
error_message.as_ptr(), error_message.as_ptr(),

View File

@@ -23,7 +23,7 @@ pub(crate) struct NamedExport {
pub(crate) export_type: ExportType, pub(crate) export_type: ExportType,
/// The instance that holds the export. /// The instance that holds the export.
pub(crate) instance: *mut Instance, pub(crate) instance: NonNull<Instance>,
} }
/// Opaque pointer to `ImportType`. /// Opaque pointer to `ImportType`.
@@ -400,7 +400,7 @@ pub unsafe extern "C" fn wasmer_export_to_memory(
memory: *mut *mut wasmer_memory_t, memory: *mut *mut wasmer_memory_t,
) -> wasmer_result_t { ) -> wasmer_result_t {
let named_export = &*(export as *const NamedExport); let named_export = &*(export as *const NamedExport);
let instance = &*named_export.instance; let instance = named_export.instance.as_ref();
if let Ok(exported_memory) = instance if let Ok(exported_memory) = instance
.exports .exports
@@ -476,7 +476,7 @@ pub unsafe extern "C" fn wasmer_export_func_call(
let results: &mut [wasmer_value_t] = slice::from_raw_parts_mut(results, results_len as usize); let results: &mut [wasmer_value_t] = slice::from_raw_parts_mut(results, results_len as usize);
let instance = &*named_export.instance; let instance = named_export.instance.as_ref();
let f: &Function = match instance.exports.get(&named_export.export_type.name()) { let f: &Function = match instance.exports.get(&named_export.export_type.name()) {
Ok(f) => f, Ok(f) => f,
Err(err) => { Err(err) => {

View File

@@ -1,6 +1,8 @@
//! Create, set, get and destroy global variables of an instance. //! Create, set, get and destroy global variables of an instance.
use crate::error::update_last_error;
use crate::value::{wasmer_value_t, wasmer_value_tag}; use crate::value::{wasmer_value_t, wasmer_value_tag};
use std::ptr::NonNull;
use wasmer::Global; use wasmer::Global;
#[repr(C)] #[repr(C)]
@@ -17,10 +19,7 @@ pub struct wasmer_global_t;
/// Creates a new Global and returns a pointer to it. /// Creates a new Global and returns a pointer to it.
/// The caller owns the object and should call `wasmer_global_destroy` to free it. /// The caller owns the object and should call `wasmer_global_destroy` to free it.
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_global_new( pub extern "C" fn wasmer_global_new(value: wasmer_value_t, mutable: bool) -> *mut wasmer_global_t {
value: wasmer_value_t,
mutable: bool,
) -> *mut wasmer_global_t {
let store = crate::get_global_store(); let store = crate::get_global_store();
let global = if mutable { let global = if mutable {
Global::new_mut(store, value.into()) Global::new_mut(store, value.into())
@@ -33,8 +32,8 @@ pub unsafe extern "C" fn wasmer_global_new(
/// Gets the value stored by the given Global /// Gets the value stored by the given Global
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_global_get(global: *mut wasmer_global_t) -> wasmer_value_t { pub unsafe extern "C" fn wasmer_global_get(global: *mut wasmer_global_t) -> wasmer_value_t {
let global = unsafe { &*(global as *mut Global) }; let global = &*(global as *mut Global);
let value: wasmer_value_t = global.get().into(); let value: wasmer_value_t = global.get().into();
value value
} }
@@ -42,18 +41,22 @@ pub extern "C" fn wasmer_global_get(global: *mut wasmer_global_t) -> wasmer_valu
/// Sets the value stored by the given Global /// Sets the value stored by the given Global
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_global_set(global: *mut wasmer_global_t, value: wasmer_value_t) { pub unsafe extern "C" fn wasmer_global_set(global: *mut wasmer_global_t, value: wasmer_value_t) {
let global = unsafe { &*(global as *mut Global) }; let global = &*(global as *mut Global);
global.set(value.into()); if let Err(err) = global.set(value.into()) {
update_last_error(err);
// can't return an error without breaking the API, probaly a safe change
// return wasmer_result_t::WASMER_ERROR;
}
} }
/// Returns a descriptor (type, mutability) of the given Global /// Returns a descriptor (type, mutability) of the given Global
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_global_get_descriptor( pub unsafe extern "C" fn wasmer_global_get_descriptor(
global: *mut wasmer_global_t, global: *mut wasmer_global_t,
) -> wasmer_global_descriptor_t { ) -> wasmer_global_descriptor_t {
let global = unsafe { &*(global as *mut Global) }; let global = &*(global as *mut Global);
let descriptor = global.ty(); let descriptor = global.ty();
wasmer_global_descriptor_t { wasmer_global_descriptor_t {
mutable: descriptor.mutability.into(), mutable: descriptor.mutability.into(),
@@ -64,8 +67,8 @@ pub extern "C" fn wasmer_global_get_descriptor(
/// Frees memory for the given Global /// Frees memory for the given Global
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_global_destroy(global: *mut wasmer_global_t) { pub unsafe extern "C" fn wasmer_global_destroy(global: Option<NonNull<wasmer_global_t>>) {
if !global.is_null() { if let Some(global_inner) = global {
unsafe { Box::from_raw(global as *mut Global) }; Box::from_raw(global_inner.cast::<Global>().as_ptr());
} }
} }

View File

@@ -10,11 +10,11 @@ use crate::{
wasmer_byte_array, wasmer_result_t, wasmer_byte_array, wasmer_result_t,
}; };
use libc::c_uint; use libc::c_uint;
use std::ptr::NonNull;
use std::{ use std::{
//convert::TryFrom, //convert::TryFrom,
ffi::c_void, ffi::c_void,
os::raw::c_char, os::raw::c_char,
ptr,
slice, slice,
//sync::Arc, //sync::Arc,
}; };
@@ -58,10 +58,11 @@ pub struct wasmer_import_object_iter_t;
/// See also `wasmer_import_object_append` /// See also `wasmer_import_object_append`
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_object_new() -> *mut wasmer_import_object_t { pub extern "C" fn wasmer_import_object_new() -> NonNull<wasmer_import_object_t> {
let import_object = Box::new(ImportObject::new()); let import_object = Box::new(ImportObject::new());
Box::into_raw(import_object) as *mut wasmer_import_object_t // TODO: use `Box::into_raw_non_null` when it becomes stable
unsafe { NonNull::new_unchecked(Box::into_raw(import_object) as *mut wasmer_import_object_t) }
} }
#[cfg(feature = "wasi")] #[cfg(feature = "wasi")]
@@ -319,15 +320,17 @@ pub unsafe extern "C" fn wasmer_import_object_iter_next(
/// not return any new data /// not return any new data
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_object_iter_at_end( pub unsafe extern "C" fn wasmer_import_object_iter_at_end(
import_object_iter: *mut wasmer_import_object_iter_t, import_object_iter: Option<NonNull<wasmer_import_object_iter_t>>,
) -> bool { ) -> bool {
if import_object_iter.is_null() { let mut import_object_iter = if let Some(import_object_iter) = import_object_iter {
import_object_iter.cast::<WasmerImportObjectIterator>()
} else {
update_last_error(CApiError { update_last_error(CApiError {
msg: "import_object_iter must not be null".to_owned(), msg: "import_object_iter must not be null".to_owned(),
}); });
return true; return true;
} };
let iter = &mut *(import_object_iter as *mut WasmerImportObjectIterator); let iter = import_object_iter.as_mut();
iter.0.peek().is_none() iter.0.peek().is_none()
} }
@@ -335,10 +338,14 @@ pub unsafe extern "C" fn wasmer_import_object_iter_at_end(
/// Frees the memory allocated by `wasmer_import_object_iterate_functions` /// Frees the memory allocated by `wasmer_import_object_iterate_functions`
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_object_iter_destroy( pub unsafe extern "C" fn wasmer_import_object_iter_destroy(
import_object_iter: *mut wasmer_import_object_iter_t, import_object_iter: Option<NonNull<wasmer_import_object_iter_t>>,
) { ) {
if !import_object_iter.is_null() { if let Some(import_object_iter) = import_object_iter {
let _ = Box::from_raw(import_object_iter as *mut WasmerImportObjectIterator); let _ = Box::from_raw(
import_object_iter
.cast::<WasmerImportObjectIterator>()
.as_ptr(),
);
} }
} }
@@ -348,13 +355,16 @@ pub unsafe extern "C" fn wasmer_import_object_iter_destroy(
/// it only frees memory allocated while querying a `wasmer_import_object_t`. /// it only frees memory allocated while querying a `wasmer_import_object_t`.
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_object_imports_destroy( pub unsafe extern "C" fn wasmer_import_object_imports_destroy(
imports: *mut wasmer_import_t, imports: Option<NonNull<wasmer_import_t>>,
imports_len: u32, imports_len: u32,
) { ) {
if imports.is_null() { let imports = if let Some(imp) = imports {
imp
} else {
return; return;
} };
let imports: &[wasmer_import_t] = &*slice::from_raw_parts_mut(imports, imports_len as usize); let imports: &[wasmer_import_t] =
&*slice::from_raw_parts_mut(imports.as_ptr(), imports_len as usize);
for import in imports { for import in imports {
let _namespace: Vec<u8> = Vec::from_raw_parts( let _namespace: Vec<u8> = Vec::from_raw_parts(
import.module_name.bytes as *mut u8, import.module_name.bytes as *mut u8,
@@ -459,13 +469,14 @@ pub unsafe extern "C" fn wasmer_import_object_extend(
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_descriptors( pub unsafe extern "C" fn wasmer_import_descriptors(
module: *const wasmer_module_t, module: Option<&wasmer_module_t>,
import_descriptors: *mut *mut wasmer_import_descriptors_t, import_descriptors: *mut *mut wasmer_import_descriptors_t,
) { ) {
if module.is_null() { let module = if let Some(module) = module {
&*(module as *const wasmer_module_t as *const Module)
} else {
return; return;
} };
let module = &*(module as *const Module);
let descriptors = module.imports().collect::<Vec<ImportType>>(); let descriptors = module.imports().collect::<Vec<ImportType>>();
let named_import_descriptors: Box<NamedImportDescriptors> = let named_import_descriptors: Box<NamedImportDescriptors> =
@@ -480,10 +491,10 @@ pub struct NamedImportDescriptors(Vec<ImportType>);
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_import_descriptors_destroy( pub extern "C" fn wasmer_import_descriptors_destroy(
import_descriptors: *mut wasmer_import_descriptors_t, import_descriptors: Option<NonNull<wasmer_import_descriptors_t>>,
) { ) {
if !import_descriptors.is_null() { if let Some(id) = import_descriptors {
unsafe { Box::from_raw(import_descriptors as *mut NamedImportDescriptors) }; unsafe { Box::from_raw(id.cast::<NamedImportDescriptors>().as_ptr()) };
} }
} }
@@ -491,27 +502,29 @@ pub extern "C" fn wasmer_import_descriptors_destroy(
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_descriptors_len( pub unsafe extern "C" fn wasmer_import_descriptors_len(
exports: *mut wasmer_import_descriptors_t, exports: Option<NonNull<wasmer_import_descriptors_t>>,
) -> c_uint { ) -> c_uint {
if exports.is_null() { let exports = if let Some(exports) = exports {
exports.cast::<NamedImportDescriptors>()
} else {
return 0; return 0;
} };
(*(exports as *mut NamedImportDescriptors)).0.len() as c_uint exports.as_ref().0.len() as c_uint
} }
/// Gets import descriptor by index /// Gets import descriptor by index
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_import_descriptors_get( pub unsafe extern "C" fn wasmer_import_descriptors_get(
import_descriptors: *mut wasmer_import_descriptors_t, import_descriptors: Option<NonNull<wasmer_import_descriptors_t>>,
idx: c_uint, idx: c_uint,
) -> *mut wasmer_import_descriptor_t { ) -> Option<NonNull<wasmer_import_descriptor_t>> {
if import_descriptors.is_null() { let mut nid = import_descriptors?.cast::<NamedImportDescriptors>();
return ptr::null_mut(); let named_import_descriptors = nid.as_mut();
} Some(
let named_import_descriptors = &mut *(import_descriptors as *mut NamedImportDescriptors); NonNull::from(&mut named_import_descriptors.0[idx as usize])
&mut (*named_import_descriptors).0[idx as usize] as *mut ImportType .cast::<wasmer_import_descriptor_t>(),
as *mut wasmer_import_descriptor_t )
} }
/// Gets name for the import descriptor /// Gets name for the import descriptor
@@ -754,16 +767,18 @@ pub unsafe extern "C" fn wasmer_import_func_returns_arity(
/// Frees memory for the given Func /// Frees memory for the given Func
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_import_func_destroy(func: *mut wasmer_import_func_t) { pub unsafe extern "C" fn wasmer_import_func_destroy(func: Option<NonNull<wasmer_import_func_t>>) {
if !func.is_null() { if let Some(func) = func {
unsafe { Box::from_raw(func as *mut Function) }; Box::from_raw(func.cast::<Function>().as_ptr());
} }
} }
/// Frees memory of the given ImportObject /// Frees memory of the given ImportObject
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_import_object_destroy(import_object: *mut wasmer_import_object_t) { pub unsafe extern "C" fn wasmer_import_object_destroy(
if !import_object.is_null() { import_object: Option<NonNull<wasmer_import_object_t>>,
unsafe { Box::from_raw(import_object as *mut ImportObject) }; ) {
if let Some(import_object) = import_object {
Box::from_raw(import_object.cast::<ImportObject>().as_ptr());
} }
} }

View File

@@ -213,7 +213,7 @@ pub unsafe extern "C" fn wasmer_wasi_generate_default_import_object() -> *mut wa
let mut wasi_env = wasi::WasiEnv::new(wasi_state); let mut wasi_env = wasi::WasiEnv::new(wasi_state);
// this API will now leak a `Memory` // this API will now leak a `Memory`
let memory_type = MemoryType::new(0, None, false); let memory_type = MemoryType::new(0, None, false);
let memory = Memory::new(store, memory_type); let memory = Memory::new(store, memory_type).expect("create memory");
wasi_env.set_memory(&memory); wasi_env.set_memory(&memory);
// TODO(mark): review lifetime of `Memory` here // TODO(mark): review lifetime of `Memory` here
let import_object = Box::new(wasi::generate_import_object_from_env( let import_object = Box::new(wasi::generate_import_object_from_env(

View File

@@ -11,7 +11,7 @@ use crate::{
use libc::{c_char, c_int, c_void}; use libc::{c_char, c_int, c_void};
use std::collections::HashMap; use std::collections::HashMap;
use std::ffi::CStr; use std::ffi::CStr;
use std::ptr; use std::ptr::NonNull;
use std::slice; use std::slice;
use wasmer::{ use wasmer::{
Exports, Extern, Function, Global, ImportObject, Instance, Memory, Module, Table, Val, Exports, Extern, Function, Global, ImportObject, Instance, Memory, Module, Table, Val,
@@ -99,17 +99,19 @@ pub struct wasmer_instance_context_t;
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_instantiate( pub unsafe extern "C" fn wasmer_instantiate(
instance: *mut *mut wasmer_instance_t, instance: *mut *mut wasmer_instance_t,
wasm_bytes: *mut u8, wasm_bytes: Option<NonNull<u8>>,
wasm_bytes_len: u32, wasm_bytes_len: u32,
imports: *mut wasmer_import_t, imports: *mut wasmer_import_t,
imports_len: c_int, imports_len: c_int,
) -> wasmer_result_t { ) -> wasmer_result_t {
if wasm_bytes.is_null() { let wasm_bytes = if let Some(wasm_bytes_inner) = wasm_bytes {
wasm_bytes_inner
} else {
update_last_error(CApiError { update_last_error(CApiError {
msg: "wasm bytes ptr is null".to_string(), msg: "wasm bytes ptr is null".to_string(),
}); });
return wasmer_result_t::WASMER_ERROR; return wasmer_result_t::WASMER_ERROR;
} };
let imports: &[wasmer_import_t] = slice::from_raw_parts(imports, imports_len as usize); let imports: &[wasmer_import_t] = slice::from_raw_parts(imports, imports_len as usize);
let mut import_object = ImportObject::new(); let mut import_object = ImportObject::new();
let mut namespaces = HashMap::new(); let mut namespaces = HashMap::new();
@@ -166,7 +168,7 @@ pub unsafe extern "C" fn wasmer_instantiate(
import_object.register(module_name, namespace); import_object.register(module_name, namespace);
} }
let bytes: &[u8] = slice::from_raw_parts_mut(wasm_bytes, wasm_bytes_len as usize); let bytes: &[u8] = slice::from_raw_parts_mut(wasm_bytes.as_ptr(), wasm_bytes_len as usize);
let store = crate::get_global_store(); let store = crate::get_global_store();
let module_result = Module::from_binary(store, bytes); let module_result = Module::from_binary(store, bytes);
@@ -206,15 +208,11 @@ pub unsafe extern "C" fn wasmer_instantiate(
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_instance_context_get( pub extern "C" fn wasmer_instance_context_get(
instance: *mut wasmer_instance_t, instance: Option<NonNull<wasmer_instance_t>>,
) -> *const wasmer_instance_context_t { ) -> Option<&'static wasmer_instance_context_t> {
if instance.is_null() {
return ptr::null() as _;
}
unimplemented!("wasmer_instance_context_get: API changed") unimplemented!("wasmer_instance_context_get: API changed")
/* /*
let instance = unsafe { &*(instance as *const Instance) }; let instance = instance?.as_ref();
let context: *const Ctx = instance.context() as *const _; let context: *const Ctx = instance.context() as *const _;
context as *const wasmer_instance_context_t context as *const wasmer_instance_context_t
@@ -403,20 +401,24 @@ pub unsafe extern "C" fn wasmer_instance_call(
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_instance_exports( pub unsafe extern "C" fn wasmer_instance_exports(
instance: *mut wasmer_instance_t, instance: Option<NonNull<wasmer_instance_t>>,
exports: *mut *mut wasmer_exports_t, exports: *mut *mut wasmer_exports_t,
) { ) {
if instance.is_null() { let instance = if let Some(instance) = instance {
instance.cast::<Instance>()
} else {
return; return;
} };
let instance_ref = &mut *(instance as *mut Instance); let mut instance_ref_copy = instance.clone();
let mut exports_vec: Vec<NamedExport> = instance_ref let instance_ref = instance_ref_copy.as_mut();
let exports_vec: Vec<NamedExport> = instance_ref
.module() .module()
.exports() .exports()
.map(|export_type| NamedExport { .map(|export_type| NamedExport {
export_type, export_type,
instance: instance as *mut Instance, instance,
}) })
.collect(); .collect();
@@ -551,8 +553,8 @@ pub extern "C" fn wasmer_instance_context_data_get(
/// ``` /// ```
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_instance_destroy(instance: *mut wasmer_instance_t) { pub unsafe extern "C" fn wasmer_instance_destroy(instance: Option<NonNull<wasmer_instance_t>>) {
if !instance.is_null() { if let Some(instance_inner) = instance {
unsafe { Box::from_raw(instance as *mut Instance) }; Box::from_raw(instance_inner.cast::<Instance>().as_ptr());
} }
} }

View File

@@ -1,5 +1,7 @@
#![doc(html_favicon_url = "https://wasmer.io/static/icons/favicon.ico")] #![doc(html_favicon_url = "https://wasmer.io/static/icons/favicon.ico")]
#![doc(html_logo_url = "https://avatars3.githubusercontent.com/u/44205449?s=200&v=4")] #![doc(html_logo_url = "https://avatars3.githubusercontent.com/u/44205449?s=200&v=4")]
// temporary while in transition
#![allow(unused_variables)]
//! # Wasmer Runtime C API //! # Wasmer Runtime C API
//! //!

View File

@@ -69,9 +69,18 @@ pub unsafe extern "C" fn wasmer_memory_new(
}; };
let store = crate::get_global_store(); let store = crate::get_global_store();
let desc = MemoryType::new(Pages(limits.min), max, false); let desc = MemoryType::new(Pages(limits.min), max, false);
let new_memory = Memory::new(store, desc); match Memory::new(store, desc) {
*memory = Box::into_raw(Box::new(new_memory)) as *mut wasmer_memory_t; Ok(new_memory) => {
wasmer_result_t::WASMER_OK *memory = Box::into_raw(Box::new(new_memory)) as *mut wasmer_memory_t;
wasmer_result_t::WASMER_OK
}
Err(err) => {
update_last_error(CApiError {
msg: err.to_string(),
});
wasmer_result_t::WASMER_ERROR
}
}
} }
/// Grows a memory by the given number of pages (of 65Kb each). /// Grows a memory by the given number of pages (of 65Kb each).
@@ -105,8 +114,13 @@ pub extern "C" fn wasmer_memory_grow(memory: *mut wasmer_memory_t, delta: u32) -
let grow_result = memory.grow(Pages(delta)); let grow_result = memory.grow(Pages(delta));
match grow_result { match grow_result {
Some(_) => wasmer_result_t::WASMER_OK, Ok(_) => wasmer_result_t::WASMER_OK,
_ => wasmer_result_t::WASMER_ERROR, Err(err) => {
update_last_error(CApiError {
msg: err.to_string(),
});
wasmer_result_t::WASMER_ERROR
}
} }
} }

View File

@@ -196,7 +196,7 @@ pub unsafe extern "C" fn wasmer_module_serialize(
let module = &*(module as *const Module); let module = &*(module as *const Module);
match module.serialize() { match module.serialize() {
Ok(mut serialized_module) => { Ok(serialized_module) => {
let boxed_slice = serialized_module.into_boxed_slice(); let boxed_slice = serialized_module.into_boxed_slice();
*serialized_module_out = Box::into_raw(Box::new(boxed_slice)) as _; *serialized_module_out = Box::into_raw(Box::new(boxed_slice)) as _;
@@ -267,16 +267,17 @@ pub unsafe extern "C" fn wasmer_serialized_module_from_bytes(
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn wasmer_module_deserialize( pub unsafe extern "C" fn wasmer_module_deserialize(
module: *mut *mut wasmer_module_t, module: *mut *mut wasmer_module_t,
serialized_module: *const wasmer_serialized_module_t, serialized_module: Option<&wasmer_serialized_module_t>,
) -> wasmer_result_t { ) -> wasmer_result_t {
if serialized_module.is_null() { let serialized_module: &[u8] = if let Some(sm) = serialized_module {
&*(sm as *const wasmer_serialized_module_t as *const &[u8])
} else {
update_last_error(CApiError { update_last_error(CApiError {
msg: "`serialized_module` pointer is null".to_string(), msg: "`serialized_module` pointer is null".to_string(),
}); });
return wasmer_result_t::WASMER_ERROR; return wasmer_result_t::WASMER_ERROR;
} };
let serialized_module: &[u8] = &*(serialized_module as *const &[u8]);
let store = crate::get_global_store(); let store = crate::get_global_store();
match Module::deserialize(store, serialized_module) { match Module::deserialize(store, serialized_module) {

View File

@@ -1,6 +1,7 @@
//! Create, grow, destroy tables of an instance. //! Create, grow, destroy tables of an instance.
use crate::{error::update_last_error, wasmer_limits_t, wasmer_result_t}; use crate::{error::update_last_error, wasmer_limits_t, wasmer_result_t};
use std::ptr::NonNull;
use wasmer::{AnyRef, Table, TableType, Val, ValType}; use wasmer::{AnyRef, Table, TableType, Val, ValType};
#[repr(C)] #[repr(C)]
@@ -15,8 +16,8 @@ fn get_default_table_value(table_type: ValType) -> Val {
ValType::F32 => Val::F32(0.), ValType::F32 => Val::F32(0.),
ValType::F64 => Val::F64(0.), ValType::F64 => Val::F64(0.),
ValType::V128 => Val::V128(0), ValType::V128 => Val::V128(0),
ValType::AnyRef => Val::AnyRef(AnyRef::null()), // todo!("Figure out what the default AnyRef value is"), ValType::AnyRef => Val::AnyRef(AnyRef::null()),
ValType::FuncRef => Val::AnyRef(AnyRef::null()), //todo!("Figure out what the default FuncRef value is"), ValType::FuncRef => Val::AnyRef(AnyRef::null()),
} }
} }
@@ -65,8 +66,11 @@ pub unsafe extern "C" fn wasmer_table_new(
/// and `wasmer_last_error_message` to get an error message. /// and `wasmer_last_error_message` to get an error message.
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_table_grow(table: *mut wasmer_table_t, delta: u32) -> wasmer_result_t { pub unsafe extern "C" fn wasmer_table_grow(
let table = unsafe { &*(table as *mut Table) }; table: *mut wasmer_table_t,
delta: u32,
) -> wasmer_result_t {
let table = &*(table as *mut Table);
let table_type = table.ty().ty; let table_type = table.ty().ty;
let table_default_value = get_default_table_value(table_type); let table_default_value = get_default_table_value(table_type);
let delta_result = table.grow(delta, table_default_value); let delta_result = table.grow(delta, table_default_value);
@@ -82,16 +86,16 @@ pub extern "C" fn wasmer_table_grow(table: *mut wasmer_table_t, delta: u32) -> wasmer_result_t {
/// Returns the current length of the given Table /// Returns the current length of the given Table
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_table_length(table: *mut wasmer_table_t) -> u32 { pub unsafe extern "C" fn wasmer_table_length(table: *mut wasmer_table_t) -> u32 {
let table = unsafe { &*(table as *mut Table) }; let table = &*(table as *mut Table);
table.size() table.size()
} }
/// Frees memory for the given Table /// Frees memory for the given Table
#[allow(clippy::cast_ptr_alignment)] #[allow(clippy::cast_ptr_alignment)]
#[no_mangle] #[no_mangle]
pub extern "C" fn wasmer_table_destroy(table: *mut wasmer_table_t) { pub unsafe extern "C" fn wasmer_table_destroy(table: Option<NonNull<wasmer_table_t>>) {
if !table.is_null() { if let Some(table_inner) = table {
unsafe { Box::from_raw(table as *mut Table) }; Box::from_raw(table_inner.cast::<Table>().as_ptr());
} }
} }
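
wasmer_table_destroy applies the same idea to an owning pointer: Option<NonNull<T>> also matches *mut T on the C ABI, and rebuilding the Box hands the allocation back to Rust. A condensed sketch, assuming the pointer originally came from Box::into_raw:

use std::ptr::NonNull;

pub unsafe extern "C" fn destroy_u32(ptr: Option<NonNull<u32>>) {
    if let Some(inner) = ptr {
        // Reconstruct the Box so the allocation is freed on drop; NULL is a no-op.
        drop(Box::from_raw(inner.as_ptr()));
    }
}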

View File

@@ -111,8 +111,7 @@ add_test(test-instantiate test-instantiate)
target_link_libraries(test-memory general ${WASMER_LIB}) target_link_libraries(test-memory general ${WASMER_LIB})
target_compile_options(test-memory PRIVATE ${COMPILER_OPTIONS}) target_compile_options(test-memory PRIVATE ${COMPILER_OPTIONS})
# TODO: reenable this test add_test(test-memory test-memory)
#add_test(test-memory test-memory)
target_link_libraries(test-module general ${WASMER_LIB}) target_link_libraries(test-module general ${WASMER_LIB})
target_compile_options(test-module PRIVATE ${COMPILER_OPTIONS}) target_compile_options(test-module PRIVATE ${COMPILER_OPTIONS})

View File

@@ -40,7 +40,7 @@ int main()
char *error_str = malloc(error_len); char *error_str = malloc(error_len);
wasmer_last_error_message(error_str, error_len); wasmer_last_error_message(error_str, error_len);
printf("Error str: `%s`\n", error_str); printf("Error str: `%s`\n", error_str);
assert(0 == strcmp(error_str, "Failed to add pages because would exceed maximum number of pages for the memory. Left: 22, Added: 15")); assert(0 == strcmp(error_str, "The memory could not grow: current size 12 pages, requested increase: 10 pages"));
free(error_str); free(error_str);
wasmer_memory_t *bad_memory = NULL; wasmer_memory_t *bad_memory = NULL;
@@ -58,7 +58,7 @@ int main()
char *error_str2 = malloc(error_len2); char *error_str2 = malloc(error_len2);
wasmer_last_error_message(error_str2, error_len2); wasmer_last_error_message(error_str2, error_len2);
printf("Error str 2: `%s`\n", error_str2); printf("Error str 2: `%s`\n", error_str2);
assert(0 == strcmp(error_str2, "Unable to create because the supplied descriptor is invalid: \"Max number of memory pages is less than the minimum number of pages\"")); assert(0 == strcmp(error_str2, "The memory plan is invalid because the maximum (10 pages) is less than the minimum (15 pages)"));
free(error_str2); free(error_str2);
printf("Destroy memory\n"); printf("Destroy memory\n");

View File

@@ -22,7 +22,7 @@ impl WasmHash {
/// is, in fact, a wasm module. /// is, in fact, a wasm module.
pub fn generate(wasm: &[u8]) -> Self { pub fn generate(wasm: &[u8]) -> Self {
let hash = blake3::hash(wasm); let hash = blake3::hash(wasm);
WasmHash::new(hash.into()) Self::new(hash.into())
} }
pub(crate) fn into_array(self) -> [u8; 32] { pub(crate) fn into_array(self) -> [u8; 32] {
@@ -56,7 +56,7 @@ impl FromStr for WasmHash {
)); ));
} }
use std::convert::TryInto; use std::convert::TryInto;
Ok(WasmHash(bytes[0..32].try_into().map_err(|e| { Ok(Self(bytes[0..32].try_into().map_err(|e| {
DeserializeError::Generic(format!("Could not get first 32 bytes: {}", e)) DeserializeError::Generic(format!("Could not get first 32 bytes: {}", e))
})?)) })?))
} }
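
The FromStr body relies on TryInto converting a &[u8] into [u8; 32], which succeeds only when the slice is exactly 32 bytes long. A standalone illustration of that step (a hypothetical helper, not part of the crate):

use std::convert::TryInto;

fn first_32_bytes(bytes: &[u8]) -> Result<[u8; 32], String> {
    bytes
        .get(0..32) // None if the input is shorter than 32 bytes
        .ok_or_else(|| "input shorter than 32 bytes".to_string())?
        .try_into() // infallible here: the slice is exactly 32 bytes
        .map_err(|e| format!("Could not get first 32 bytes: {}", e))
}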

View File

@@ -1,3 +1,8 @@
//! Wasmer Cranelift compiler build script.
//!
//! Sets the git revision? for $PURPOSE
//! TODO(syrus): explain what's happening here
use std::process::Command; use std::process::Command;
use std::str; use std::str;
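
The module docs above are still marked TODO; as a rough sketch of what build scripts like this one typically do, they shell out to git and expose the revision through an environment variable (the WASMER_GIT_REV name here is hypothetical, not taken from the crate):

use std::process::Command;
use std::str;

fn main() {
    let rev = Command::new("git")
        .args(&["rev-parse", "--short", "HEAD"])
        .output()
        .ok()
        .and_then(|o| str::from_utf8(&o.stdout).ok().map(|s| s.trim().to_string()))
        .unwrap_or_else(|| "unknown".to_string());
    // Hypothetical variable; the crate would read it back with env!().
    println!("cargo:rustc-env=WASMER_GIT_REV={}", rev);
}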

View File

@@ -4,16 +4,17 @@ use crate::address_map::get_function_address_map;
use crate::config::CraneliftConfig; use crate::config::CraneliftConfig;
use crate::func_environ::{get_func_name, FuncEnvironment}; use crate::func_environ::{get_func_name, FuncEnvironment};
use crate::sink::{RelocSink, TrapSink}; use crate::sink::{RelocSink, TrapSink};
use crate::trampoline::{make_wasm_trampoline, FunctionBuilderContext}; use crate::trampoline::{
use crate::translator::{ make_trampoline_dynamic_function, make_trampoline_function_call, FunctionBuilderContext,
compiled_function_unwind_info, irlibcall_to_libcall, irreloc_to_relocationkind,
signature_to_cranelift_ir, transform_jump_table, FuncTranslator,
}; };
use cranelift_codegen::ir::{self, ExternalName}; use crate::translator::{
compiled_function_unwind_info, signature_to_cranelift_ir, transform_jump_table, FuncTranslator,
};
use cranelift_codegen::ir;
use cranelift_codegen::print_errors::pretty_error; use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::{binemit, isa, Context}; use cranelift_codegen::{binemit, isa, Context};
use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
use wasm_common::entity::{EntityRef, PrimaryMap}; use wasm_common::entity::PrimaryMap;
use wasm_common::{ use wasm_common::{
Features, FunctionIndex, FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex, Features, FunctionIndex, FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex,
TableIndex, TableIndex,
@@ -21,11 +22,9 @@ use wasm_common::{
use wasmer_compiler::CompileError; use wasmer_compiler::CompileError;
use wasmer_compiler::{ use wasmer_compiler::{
Compilation, CompiledFunction, CompiledFunctionFrameInfo, Compiler, FunctionBody, Compilation, CompiledFunction, CompiledFunctionFrameInfo, Compiler, FunctionBody,
FunctionBodyData, SourceLoc, TrapInformation, FunctionBodyData,
}; };
use wasmer_compiler::{CompilerConfig, ModuleTranslationState, Target}; use wasmer_compiler::{CompilerConfig, ModuleTranslationState, Target};
use wasmer_compiler::{Relocation, RelocationTarget};
use wasmer_runtime::TrapCode;
use wasmer_runtime::{MemoryPlan, Module, TablePlan}; use wasmer_runtime::{MemoryPlan, Module, TablePlan};
/// A compiler that compiles a WebAssembly module with Cranelift, translating the Wasm to Cranelift IR, /// A compiler that compiles a WebAssembly module with Cranelift, translating the Wasm to Cranelift IR,
@@ -159,15 +158,37 @@ impl Compiler for CraneliftCompiler {
Ok(Compilation::new(functions, custom_sections)) Ok(Compilation::new(functions, custom_sections))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {
signatures signatures
.par_iter() .par_iter()
.map_init(FunctionBuilderContext::new, |mut cx, sig| { .map_init(FunctionBuilderContext::new, |mut cx, sig| {
make_wasm_trampoline(&*self.isa, &mut cx, sig, std::mem::size_of::<u128>()) make_trampoline_function_call(&*self.isa, &mut cx, sig)
}) })
.collect::<Result<Vec<_>, CompileError>>() .collect::<Result<Vec<_>, CompileError>>()
} }
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError> {
use wasmer_runtime::VMOffsets;
let isa = self.isa();
let frontend_config = isa.frontend_config();
let offsets = VMOffsets::new(frontend_config.pointer_bytes(), module);
Ok(module
.functions
.values()
.take(module.num_imported_funcs)
.collect::<Vec<_>>()
.par_iter()
.map_init(FunctionBuilderContext::new, |mut cx, sig_index| {
make_trampoline_dynamic_function(&*self.isa, &module, &offsets, &mut cx, &sig_index)
})
.collect::<Result<Vec<_>, CompileError>>()?
.into_iter()
.collect::<PrimaryMap<FunctionIndex, FunctionBody>>())
}
} }
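
Both trampoline compilers fan out over rayon with map_init, which builds one FunctionBuilderContext per worker thread instead of one per signature. A minimal model of the pattern, with a String standing in for the real context:

use rayon::prelude::*;

fn compile_all(items: &[u32]) -> Result<Vec<u32>, String> {
    items
        .par_iter()
        .map_init(
            || String::from("per-thread scratch"), // run once per rayon worker
            |_scratch, item| Ok(item * 2),         // reuses that worker's state
        )
        .collect::<Result<Vec<_>, String>>()
}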

View File

@@ -58,7 +58,7 @@ pub use crate::compiler::CraneliftCompiler;
pub use crate::config::CraneliftConfig; pub use crate::config::CraneliftConfig;
pub use crate::debug::{FrameLayout, FrameLayoutChange, FrameLayouts}; pub use crate::debug::{FrameLayout, FrameLayoutChange, FrameLayouts};
pub use crate::debug::{ModuleMemoryOffset, ModuleVmctxInfo, ValueLabelsRanges}; pub use crate::debug::{ModuleMemoryOffset, ModuleVmctxInfo, ValueLabelsRanges};
pub use crate::trampoline::make_wasm_trampoline; pub use crate::trampoline::make_trampoline_function_call;
/// Version number of this crate. /// Version number of this crate.
pub const VERSION: &str = env!("CARGO_PKG_VERSION"); pub const VERSION: &str = env!("CARGO_PKG_VERSION");

View File

@@ -1,9 +1,6 @@
//! Support for compiling with Cranelift. //! Support for compiling with Cranelift.
use crate::translator::{ use crate::translator::{irlibcall_to_libcall, irreloc_to_relocationkind};
irlibcall_to_libcall, irreloc_to_relocationkind, signature_to_cranelift_ir,
transform_jump_table, FuncTranslator,
};
use cranelift_codegen::binemit; use cranelift_codegen::binemit;
use cranelift_codegen::ir::{self, ExternalName}; use cranelift_codegen::ir::{self, ExternalName};
use wasm_common::entity::EntityRef; use wasm_common::entity::EntityRef;
@@ -136,7 +133,7 @@ fn translate_ir_trapcode(trap: ir::TrapCode) -> TrapCode {
ir::TrapCode::BadConversionToInteger => TrapCode::BadConversionToInteger, ir::TrapCode::BadConversionToInteger => TrapCode::BadConversionToInteger,
ir::TrapCode::UnreachableCodeReached => TrapCode::UnreachableCodeReached, ir::TrapCode::UnreachableCodeReached => TrapCode::UnreachableCodeReached,
ir::TrapCode::Interrupt => TrapCode::Interrupt, ir::TrapCode::Interrupt => TrapCode::Interrupt,
ir::TrapCode::User(user_code) => unimplemented!("User trap code not supported"), ir::TrapCode::User(_user_code) => unimplemented!("User trap code not supported"),
// ir::TrapCode::User(user_code) => TrapCode::User(user_code), // ir::TrapCode::User(user_code) => TrapCode::User(user_code),
} }
} }

View File

@@ -0,0 +1,146 @@
//! A trampoline generator for calling dynamic host functions from Wasm.
use super::binemit::TrampolineRelocSink;
use crate::translator::{compiled_function_unwind_info, signature_to_cranelift_ir};
use cranelift_codegen::ir::{
types, ExternalName, Function, InstBuilder, MemFlags, StackSlotData, StackSlotKind,
};
use cranelift_codegen::isa::TargetIsa;
use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir};
use std::cmp;
use std::mem;
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use wasm_common::entity::EntityRef;
use wasm_common::SignatureIndex;
use wasmer_compiler::{CompileError, FunctionBody};
use wasmer_runtime::{Module, VMOffsets};
/// Create a trampoline for invoking a WebAssembly function.
pub fn make_trampoline_dynamic_function(
isa: &dyn TargetIsa,
module: &Module,
offsets: &VMOffsets,
fn_builder_ctx: &mut FunctionBuilderContext,
sig_index: &SignatureIndex,
) -> Result<FunctionBody, CompileError> {
let func_type = &module.signatures[*sig_index];
let pointer_type = isa.pointer_type();
let frontend_config = isa.frontend_config();
let signature = signature_to_cranelift_ir(func_type, &frontend_config);
let mut stub_sig = ir::Signature::new(frontend_config.default_call_conv);
// Add the caller `vmctx` parameter.
stub_sig.params.push(ir::AbiParam::special(
pointer_type,
ir::ArgumentPurpose::VMContext,
));
// Add the caller/callee `vmctx` parameter.
stub_sig.params.push(ir::AbiParam::new(pointer_type));
// Add the `sig_index` parameter.
stub_sig.params.push(ir::AbiParam::new(types::I32));
// Add the `values_vec` parameter.
stub_sig.params.push(ir::AbiParam::new(pointer_type));
// Compute the size of the values vector. The vmctx and caller vmctx are passed separately.
let value_size = mem::size_of::<u128>();
let values_vec_len =
(value_size * cmp::max(signature.params.len() - 2, signature.returns.len())) as u32;
let mut context = Context::new();
context.func = Function::with_name_signature(ExternalName::user(0, 0), signature.clone());
let ss = context.func.create_stack_slot(StackSlotData::new(
StackSlotKind::ExplicitSlot,
values_vec_len,
));
{
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_block();
builder.append_block_params_for_function_params(block0);
builder.switch_to_block(block0);
builder.seal_block(block0);
let values_vec_ptr_val = builder.ins().stack_addr(pointer_type, ss, 0);
let mflags = MemFlags::trusted();
// Store only the non-vmctx arguments; the two vmctx pointers are passed separately.
for i in 2..signature.params.len() {
let val = builder.func.dfg.block_params(block0)[i];
builder.ins().store(
mflags,
val,
values_vec_ptr_val,
((i - 2) * value_size) as i32,
);
}
let block_params = builder.func.dfg.block_params(block0);
let vmctx_ptr_val = block_params[0];
let caller_vmctx_ptr_val = block_params[1];
// Get the signature index
let caller_sig_id = builder.ins().iconst(types::I32, sig_index.index() as i64);
let callee_args = vec![
vmctx_ptr_val,
caller_vmctx_ptr_val,
caller_sig_id,
values_vec_ptr_val,
];
let new_sig = builder.import_signature(stub_sig);
let mem_flags = ir::MemFlags::trusted();
let callee_value = builder.ins().load(
pointer_type,
mem_flags,
vmctx_ptr_val,
offsets.vmdynamicfunction_import_context_address() as i32,
);
builder
.ins()
.call_indirect(new_sig, callee_value, &callee_args);
let mflags = MemFlags::trusted();
let mut results = Vec::new();
for (i, r) in signature.returns.iter().enumerate() {
let load = builder.ins().load(
r.value_type,
mflags,
values_vec_ptr_val,
(i * value_size) as i32,
);
results.push(load);
}
builder.ins().return_(&results);
builder.finalize()
}
let mut code_buf = Vec::new();
let mut reloc_sink = TrampolineRelocSink {};
let mut trap_sink = binemit::NullTrapSink {};
let mut stackmap_sink = binemit::NullStackmapSink {};
context
.compile_and_emit(
isa,
&mut code_buf,
&mut reloc_sink,
&mut trap_sink,
&mut stackmap_sink,
)
.map_err(|error| CompileError::Codegen(pretty_error(&context.func, Some(isa), error)))?;
let unwind_info = compiled_function_unwind_info(isa, &context);
Ok(FunctionBody {
body: code_buf,
unwind_info,
})
}
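
For orientation, the stub signature above implies a host-side callee that receives (vmctx, caller_vmctx, sig_index, values_vec) and reads 16-byte slots, matching the mem::size_of::<u128>() stride used by the generator. A hypothetical sketch of that counterpart, with illustrative types rather than Wasmer's actual definitions:

unsafe extern "C" fn dynamic_callee(
    _vmctx: *mut u8,        // module vmctx (the AbiParam with VMContext purpose)
    _caller_vmctx: *mut u8, // caller's vmctx
    sig_index: u32,         // which signature is being invoked
    values_vec: *mut u128,  // packed arguments, overwritten with results
) {
    let first_arg = *values_vec; // argument 0 occupies the first 16-byte slot
    // ...dispatch on sig_index, call the host function, write results back...
    let _ = (sig_index, first_arg);
}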

View File

@@ -7,7 +7,7 @@
//! ``` //! ```
use super::binemit::TrampolineRelocSink; use super::binemit::TrampolineRelocSink;
use crate::translator::{ use crate::translator::{
compiled_function_unwind_info, signature_to_cranelift_ir, transform_jump_table, compiled_function_unwind_info, signature_to_cranelift_ir, /*transform_jump_table, */
}; };
use cranelift_codegen::ir::InstBuilder; use cranelift_codegen::ir::InstBuilder;
use cranelift_codegen::isa::TargetIsa; use cranelift_codegen::isa::TargetIsa;
@@ -15,15 +15,15 @@ use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::Context; use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir}; use cranelift_codegen::{binemit, ir};
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use std::mem;
use wasm_common::FunctionType; use wasm_common::FunctionType;
use wasmer_compiler::{CompileError, CompiledFunction, CompiledFunctionFrameInfo, FunctionBody}; use wasmer_compiler::{CompileError, FunctionBody};
/// Create a trampoline for invoking a WebAssembly function. /// Create a trampoline for invoking a WebAssembly function.
pub fn make_wasm_trampoline( pub fn make_trampoline_function_call(
isa: &dyn TargetIsa, isa: &dyn TargetIsa,
fn_builder_ctx: &mut FunctionBuilderContext, fn_builder_ctx: &mut FunctionBuilderContext,
func_type: &FunctionType, func_type: &FunctionType,
value_size: usize,
) -> Result<FunctionBody, CompileError> { ) -> Result<FunctionBody, CompileError> {
let pointer_type = isa.pointer_type(); let pointer_type = isa.pointer_type();
let frontend_config = isa.frontend_config(); let frontend_config = isa.frontend_config();
@@ -49,6 +49,7 @@ pub fn make_wasm_trampoline(
context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig); context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig);
context.func.collect_frame_layout_info(); context.func.collect_frame_layout_info();
let value_size = mem::size_of::<u128>();
{ {
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx); let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_block(); let block0 = builder.create_block();
@@ -86,7 +87,7 @@ pub fn make_wasm_trampoline(
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let new_sig = builder.import_signature(signature.clone()); let new_sig = builder.import_signature(signature);
let call = builder let call = builder
.ins() .ins()

View File

@@ -1,10 +1,10 @@
#![allow(missing_docs)] #![allow(missing_docs)]
// mod host; mod dynamic_function;
mod wasm; mod function_call;
// pub use host::make_host_trampoline; pub use self::dynamic_function::make_trampoline_dynamic_function;
pub use self::wasm::make_wasm_trampoline; pub use self::function_call::make_trampoline_function_call;
// TODO: Delete // TODO: Delete
pub mod ir { pub mod ir {

View File

@@ -16,6 +16,7 @@ use wasmer_compiler::WasmResult;
/// The value of a WebAssembly global variable. /// The value of a WebAssembly global variable.
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
pub enum GlobalVariable { pub enum GlobalVariable {
#[allow(dead_code)]
/// This is a constant global with a value known at compile time. /// This is a constant global with a value known at compile time.
Const(ir::Value), Const(ir::Value),
@@ -29,10 +30,12 @@ pub enum GlobalVariable {
ty: ir::Type, ty: ir::Type,
}, },
#[allow(dead_code)]
/// This is a global variable that needs to be handled by the environment. /// This is a global variable that needs to be handled by the environment.
Custom, Custom,
} }
#[allow(dead_code)]
/// How to return from functions. /// How to return from functions.
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum ReturnMode { pub enum ReturnMode {

View File

@@ -216,15 +216,6 @@ pub struct FuncTranslationState {
functions: HashMap<FunctionIndex, (ir::FuncRef, usize)>, functions: HashMap<FunctionIndex, (ir::FuncRef, usize)>,
} }
// Public methods that are exposed to non-`wasmer_compiler` API consumers.
impl FuncTranslationState {
/// True if the current translation state expresses reachable code, false if it is unreachable.
#[inline]
pub fn reachable(&self) -> bool {
self.reachable
}
}
impl FuncTranslationState { impl FuncTranslationState {
/// Construct a new, empty, `FuncTranslationState` /// Construct a new, empty, `FuncTranslationState`
pub(crate) fn new() -> Self { pub(crate) fn new() -> Self {

View File

@@ -21,13 +21,13 @@ pub fn signature_to_cranelift_ir(
target_config: &TargetFrontendConfig, target_config: &TargetFrontendConfig,
) -> ir::Signature { ) -> ir::Signature {
let mut sig = ir::Signature::new(target_config.default_call_conv); let mut sig = ir::Signature::new(target_config.default_call_conv);
sig.params.extend(signature.params().iter().map(|ty| { sig.params.extend(signature.params().iter().map(|&ty| {
let cret_arg: ir::Type = type_to_irtype(ty.clone(), target_config) let cret_arg: ir::Type = type_to_irtype(ty, target_config)
.expect("only numeric types are supported in function signatures"); .expect("only numeric types are supported in function signatures");
AbiParam::new(cret_arg) AbiParam::new(cret_arg)
})); }));
sig.returns.extend(signature.results().iter().map(|ty| { sig.returns.extend(signature.results().iter().map(|&ty| {
let cret_arg: ir::Type = type_to_irtype(ty.clone(), target_config) let cret_arg: ir::Type = type_to_irtype(ty, target_config)
.expect("only numeric types are supported in function signatures"); .expect("only numeric types are supported in function signatures");
AbiParam::new(cret_arg) AbiParam::new(cret_arg)
})); }));

View File

@@ -16,7 +16,7 @@ wasm-common = { path = "../wasm-common", version = "0.16.2" }
target-lexicon = { version = "0.10.0", default-features = false } target-lexicon = { version = "0.10.0", default-features = false }
smallvec = "1" smallvec = "1"
goblin = "0.2" goblin = "0.2"
libc = "0.2.69" libc = { version = "0.2.70", default-features = false }
byteorder = "1" byteorder = "1"
itertools = "0.9.0" itertools = "0.9.0"
rayon = "1.3.0" rayon = "1.3.0"
@@ -24,14 +24,14 @@ rayon = "1.3.0"
[target.'cfg(target_arch = "x86_64")'.dependencies.inkwell] [target.'cfg(target_arch = "x86_64")'.dependencies.inkwell]
#version = "0.1.0-llvm8sample" #version = "0.1.0-llvm8sample"
git = "https://github.com/theDan64/inkwell" git = "https://github.com/theDan64/inkwell"
rev = "af1846fd8aea530cef9a59170715e5c6c64346f6" rev = "1bfecc0a095d7ffdfa20a64630864f0297349508"
default-features = false default-features = false
features = ["llvm10-0", "target-x86"] features = ["llvm10-0", "target-x86"]
[target.'cfg(target_arch = "aarch64")'.dependencies.inkwell] [target.'cfg(target_arch = "aarch64")'.dependencies.inkwell]
#version = "0.1.0-llvm8sample" #version = "0.1.0-llvm8sample"
git = "https://github.com/theDan64/inkwell" git = "https://github.com/theDan64/inkwell"
rev = "af1846fd8aea530cef9a59170715e5c6c64346f6" rev = "1bfecc0a095d7ffdfa20a64630864f0297349508"
default-features = false default-features = false
features = ["llvm10-0", "target-aarch64"] features = ["llvm10-0", "target-aarch64"]

View File

@@ -11,7 +11,7 @@ use std::path::PathBuf;
use std::process::Command; use std::process::Command;
// Version of the llvm-sys crate that we (through inkwell) depend on. // Version of the llvm-sys crate that we (through inkwell) depend on.
const LLVM_SYS_MAJOR_VERSION: &str = "80"; const LLVM_SYS_MAJOR_VERSION: &str = "100";
const LLVM_SYS_MINOR_VERSION: &str = "0"; const LLVM_SYS_MINOR_VERSION: &str = "0";
// Environment variables that can guide compilation // Environment variables that can guide compilation

View File

@@ -72,6 +72,7 @@ impl Compiler for LLVMCompiler {
let mut readonly_section = CustomSection { let mut readonly_section = CustomSection {
protection: CustomSectionProtection::Read, protection: CustomSectionProtection::Read,
bytes: SectionBody::default(), bytes: SectionBody::default(),
relocations: vec![],
}; };
for (func_index, _) in &module.functions { for (func_index, _) in &module.functions {
@@ -81,6 +82,7 @@ impl Compiler for LLVMCompiler {
.cloned() .cloned()
.unwrap_or_else(|| format!("fn{}", func_index.index())); .unwrap_or_else(|| format!("fn{}", func_index.index()));
} }
let mut module_custom_sections = PrimaryMap::new();
let mut functions = function_body_inputs let mut functions = function_body_inputs
.into_iter() .into_iter()
.collect::<Vec<(LocalFunctionIndex, &FunctionBodyData<'_>)>>() .collect::<Vec<(LocalFunctionIndex, &FunctionBodyData<'_>)>>()
@@ -100,46 +102,41 @@ impl Compiler for LLVMCompiler {
}) })
.collect::<Result<Vec<_>, CompileError>>()? .collect::<Result<Vec<_>, CompileError>>()?
.into_iter() .into_iter()
.map(|(mut function, local_relocations, custom_sections)| { .map(|(mut compiled_function, mut function_custom_sections)| {
/// We collect the sections data let first_section = module_custom_sections.len() as u32;
for (local_idx, custom_section) in custom_sections.iter().enumerate() { for (_, custom_section) in function_custom_sections.iter() {
let local_idx = local_idx as u32; // TODO: remove this call to clone()
// TODO: these section numbers are potentially wrong, if there's let mut custom_section = custom_section.clone();
// no Read and only a ReadExecute then ReadExecute is 0. for mut reloc in &mut custom_section.relocations {
let (ref mut section, section_num) = match &custom_section.protection { match reloc.reloc_target {
CustomSectionProtection::Read => { RelocationTarget::CustomSection(index) => {
(&mut readonly_section, SectionIndex::from_u32(0)) reloc.reloc_target = RelocationTarget::CustomSection(
} SectionIndex::from_u32(first_section + index.as_u32()),
}; )
let offset = section.bytes.len() as i64; }
section.bytes.append(&custom_section.bytes); _ => {}
// TODO: we're needlessly rescanning the whole list.
for local_relocation in &local_relocations {
if local_relocation.local_section_index == local_idx {
used_readonly_section = true;
function.relocations.push(Relocation {
kind: local_relocation.kind,
reloc_target: RelocationTarget::CustomSection(section_num),
offset: local_relocation.offset,
addend: local_relocation.addend + offset,
});
} }
} }
module_custom_sections.push(custom_section);
} }
Ok(function) for mut reloc in &mut compiled_function.relocations {
match reloc.reloc_target {
RelocationTarget::CustomSection(index) => {
reloc.reloc_target = RelocationTarget::CustomSection(
SectionIndex::from_u32(first_section + index.as_u32()),
)
}
_ => {}
}
}
compiled_function
}) })
.collect::<Result<Vec<_>, CompileError>>()?
.into_iter()
.collect::<PrimaryMap<LocalFunctionIndex, _>>(); .collect::<PrimaryMap<LocalFunctionIndex, _>>();
let mut custom_sections = PrimaryMap::new(); Ok(Compilation::new(functions, module_custom_sections))
if used_readonly_section {
custom_sections.push(readonly_section);
}
Ok(Compilation::new(functions, custom_sections))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {
@@ -150,4 +147,12 @@ impl Compiler for LLVMCompiler {
}) })
.collect::<Result<Vec<_>, CompileError>>() .collect::<Result<Vec<_>, CompileError>>()
} }
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError> {
Ok(PrimaryMap::new())
// unimplemented!("Dynamic funciton trampolines not yet implemented");
}
} }

View File

@@ -8,7 +8,7 @@ use inkwell::targets::{
use inkwell::OptimizationLevel; use inkwell::OptimizationLevel;
use itertools::Itertools; use itertools::Itertools;
use target_lexicon::Architecture; use target_lexicon::Architecture;
use wasmer_compiler::{Compiler, CompilerConfig, CpuFeature, Features, Target}; use wasmer_compiler::{Compiler, CompilerConfig, CpuFeature, Features, Target, Triple};
/// The InkWell Module type /// The InkWell Module type
pub type InkwellModule<'ctx> = inkwell::module::Module<'ctx>; pub type InkwellModule<'ctx> = inkwell::module::Module<'ctx>;
@@ -46,7 +46,30 @@ pub struct LLVMConfig {
impl LLVMConfig { impl LLVMConfig {
/// Creates a new configuration object with the default configuration /// Creates a new configuration object with the default configuration
/// specified. /// specified.
pub fn new(features: Features, target: Target) -> Self { pub fn new(mut features: Features, target: Target) -> Self {
// Override the default multi-value switch
features.multi_value = false;
let operating_system =
if target.triple().operating_system == wasmer_compiler::OperatingSystem::Darwin {
// LLVM detects static relocation + darwin + 64-bit and
// force-enables PIC because MachO doesn't support that
// combination. They don't check whether they're targeting
// MachO, they check whether the OS is set to Darwin.
//
// Since both linux and darwin use SysV ABI, this should work.
wasmer_compiler::OperatingSystem::Linux
} else {
target.triple().operating_system
};
let triple = Triple {
architecture: target.triple().architecture,
vendor: target.triple().vendor.clone(),
operating_system,
environment: target.triple().environment,
binary_format: target_lexicon::BinaryFormat::Elf,
};
let target = Target::new(triple, *target.cpu_features());
Self { Self {
enable_nan_canonicalization: true, enable_nan_canonicalization: true,
enable_verifier: false, enable_verifier: false,
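
The Darwin workaround above amounts to a small triple rewrite. Isolated as a function under the same assumptions (both Linux and Darwin use the SysV calling convention, so only the object format changes):

use target_lexicon::{BinaryFormat, OperatingSystem, Triple};

fn elf_friendly_triple(detected: &Triple) -> Triple {
    let operating_system = if detected.operating_system == OperatingSystem::Darwin {
        // Reported as Linux so LLVM does not force-enable PIC for 64-bit MachO.
        OperatingSystem::Linux
    } else {
        detected.operating_system
    };
    Triple {
        architecture: detected.architecture,
        vendor: detected.vendor.clone(),
        operating_system,
        environment: detected.environment,
        binary_format: BinaryFormat::Elf, // always emit ELF objects
    }
}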

View File

@@ -145,8 +145,9 @@ fn generate_trampoline<'ctx>(
Type::FuncRef => unimplemented!("funcref unimplemented in trampoline"), Type::FuncRef => unimplemented!("funcref unimplemented in trampoline"),
}; };
let mut args_vec = Vec::with_capacity(func_sig.params().len() + 1); let mut args_vec = Vec::with_capacity(func_sig.params().len() + 2);
args_vec.push(callee_vmctx_ptr); args_vec.push(callee_vmctx_ptr);
args_vec.push(caller_vmctx_ptr);
let mut i = 0; let mut i = 0;
for param_ty in func_sig.params().iter() { for param_ty in func_sig.params().iter() {

View File

@@ -29,20 +29,22 @@ use inkwell::{
}; };
use smallvec::SmallVec; use smallvec::SmallVec;
use std::any::Any; use std::any::Any;
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::num::TryFromIntError;
use crate::config::LLVMConfig; use crate::config::LLVMConfig;
use wasm_common::entity::{EntityRef, PrimaryMap, SecondaryMap}; use wasm_common::entity::{PrimaryMap, SecondaryMap};
use wasm_common::{ use wasm_common::{
FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, Mutability, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryType,
SignatureIndex, TableIndex, Type, Mutability, SignatureIndex, TableIndex, Type,
}; };
use wasmer_compiler::wasmparser::{self, BinaryReader, MemoryImmediate, Operator}; use wasmer_compiler::wasmparser::{self, BinaryReader, MemoryImmediate, Operator};
use wasmer_compiler::{ use wasmer_compiler::{
to_wasm_error, wasm_unsupported, Addend, CodeOffset, CompileError, CompiledFunction, to_wasm_error, wasm_unsupported, Addend, CodeOffset, CompileError, CompiledFunction,
CompiledFunctionFrameInfo, CustomSection, CustomSectionProtection, FunctionAddressMap, CompiledFunctionFrameInfo, CustomSection, CustomSectionProtection, CustomSections,
FunctionBody, FunctionBodyData, InstructionAddressMap, Relocation, RelocationKind, FunctionAddressMap, FunctionBody, FunctionBodyData, InstructionAddressMap, Relocation,
RelocationTarget, SectionBody, SourceLoc, WasmResult, RelocationKind, RelocationTarget, SectionBody, SectionIndex, SourceLoc, WasmResult,
}; };
use wasmer_runtime::libcalls::LibCall; use wasmer_runtime::libcalls::LibCall;
use wasmer_runtime::Module as WasmerCompilerModule; use wasmer_runtime::Module as WasmerCompilerModule;
@@ -52,6 +54,30 @@ use wasmer_runtime::{MemoryPlan, MemoryStyle, TablePlan, VMBuiltinFunctionIndex,
use std::fs; use std::fs;
use std::io::Write; use std::io::Write;
use wasm_common::entity::entity_impl;
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct ElfSectionIndex(u32);
entity_impl!(ElfSectionIndex);
impl ElfSectionIndex {
pub fn is_undef(&self) -> bool {
self.as_u32() == goblin::elf::section_header::SHN_UNDEF
}
pub fn from_usize(value: usize) -> Result<Self, CompileError> {
match u32::try_from(value) {
Err(_) => Err(CompileError::Codegen(format!(
"elf section index {} does not fit in 32 bits",
value
))),
Ok(value) => Ok(ElfSectionIndex::from_u32(value)),
}
}
pub fn as_usize(&self) -> usize {
self.as_u32() as usize
}
}
// TODO // TODO
fn wptype_to_type(ty: wasmparser::Type) -> WasmResult<Type> { fn wptype_to_type(ty: wasmparser::Type) -> WasmResult<Type> {
match ty { match ty {
@@ -84,15 +110,6 @@ fn const_zero<'ctx>(ty: BasicTypeEnum<'ctx>) -> BasicValueEnum<'ctx> {
} }
} }
// Relocation against a per-function section.
#[derive(Debug)]
pub struct LocalRelocation {
pub kind: RelocationKind,
pub local_section_index: u32,
pub offset: CodeOffset,
pub addend: Addend,
}
impl FuncTranslator { impl FuncTranslator {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@@ -109,9 +126,9 @@ impl FuncTranslator {
memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>, memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>,
table_plans: &PrimaryMap<TableIndex, TablePlan>, table_plans: &PrimaryMap<TableIndex, TablePlan>,
func_names: &SecondaryMap<FunctionIndex, String>, func_names: &SecondaryMap<FunctionIndex, String>,
) -> Result<(CompiledFunction, Vec<LocalRelocation>, Vec<CustomSection>), CompileError> { ) -> Result<(CompiledFunction, CustomSections), CompileError> {
let func_index = wasm_module.func_index(*local_func_index); let func_index = wasm_module.func_index(*local_func_index);
let func_name = func_names.get(func_index).unwrap(); let func_name = &func_names[func_index];
let module_name = match wasm_module.name.as_ref() { let module_name = match wasm_module.name.as_ref() {
None => format!("<anonymous module> function {}", func_name), None => format!("<anonymous module> function {}", func_name),
Some(module_name) => format!("module {} function {}", module_name, func_name), Some(module_name) => format!("module {} function {}", module_name, func_name),
@@ -124,7 +141,7 @@ impl FuncTranslator {
module.set_data_layout(&target_machine.get_target_data().get_data_layout()); module.set_data_layout(&target_machine.get_target_data().get_data_layout());
let wasm_fn_type = wasm_module let wasm_fn_type = wasm_module
.signatures .signatures
.get(*wasm_module.functions.get(func_index).unwrap()) .get(wasm_module.functions[func_index])
.unwrap(); .unwrap();
let intrinsics = Intrinsics::declare(&module, &self.ctx); let intrinsics = Intrinsics::declare(&module, &self.ctx);
@@ -135,7 +152,7 @@ impl FuncTranslator {
// TODO: figure out how many bytes long vmctx is, and mark it dereferenceable. (no need to mark it nonnull once we do this.) // TODO: figure out how many bytes long vmctx is, and mark it dereferenceable. (no need to mark it nonnull once we do this.)
// TODO: mark vmctx nofree // TODO: mark vmctx nofree
func.set_personality_function(intrinsics.personality); func.set_personality_function(intrinsics.personality);
func.as_global_value().set_section("wasmer_function"); func.as_global_value().set_section(".wasmer_function");
let entry = self.ctx.append_basic_block(func, "entry"); let entry = self.ctx.append_basic_block(func, "entry");
let start_of_code = self.ctx.append_basic_block(func, "start_of_code"); let start_of_code = self.ctx.append_basic_block(func, "start_of_code");
@@ -165,7 +182,9 @@ impl FuncTranslator {
for idx in 0..wasm_fn_type.params().len() { for idx in 0..wasm_fn_type.params().len() {
let ty = wasm_fn_type.params()[idx]; let ty = wasm_fn_type.params()[idx];
let ty = type_to_llvm(&intrinsics, ty); let ty = type_to_llvm(&intrinsics, ty);
let value = func.get_nth_param((idx + 1) as u32).unwrap(); let value = func
.get_nth_param((idx as u32).checked_add(2).unwrap())
.unwrap();
// TODO: don't interleave allocas and stores. // TODO: don't interleave allocas and stores.
let alloca = cache_builder.build_alloca(ty, "param"); let alloca = cache_builder.build_alloca(ty, "param");
cache_builder.build_store(alloca, value); cache_builder.build_store(alloca, value);
@@ -207,6 +226,7 @@ impl FuncTranslator {
// TODO: pointer width // TODO: pointer width
vmoffsets: VMOffsets::new(8, &wasm_module), vmoffsets: VMOffsets::new(8, &wasm_module),
wasm_module, wasm_module,
func_names,
}; };
while fcg.state.has_control_frames() { while fcg.state.has_control_frames() {
@@ -303,12 +323,14 @@ impl FuncTranslator {
.unwrap(); .unwrap();
// TODO: remove debugging. // TODO: remove debugging.
/*
let mem_buf_slice = memory_buffer.as_slice(); let mem_buf_slice = memory_buffer.as_slice();
let mut file = fs::File::create(format!("/home/nicholas/code{}.o", func_name)).unwrap(); let mut file = fs::File::create(format!("/home/nicholas/code{}.o", func_name)).unwrap();
let mut pos = 0; let mut pos = 0;
while pos < mem_buf_slice.len() { while pos < mem_buf_slice.len() {
pos += file.write(&mem_buf_slice[pos..]).unwrap(); pos += file.write(&mem_buf_slice[pos..]).unwrap();
} }
*/
let mem_buf_slice = memory_buffer.as_slice(); let mem_buf_slice = memory_buffer.as_slice();
let object = goblin::Object::parse(&mem_buf_slice).unwrap(); let object = goblin::Object::parse(&mem_buf_slice).unwrap();
@@ -332,69 +354,118 @@ impl FuncTranslator {
Some(name.unwrap()) Some(name.unwrap())
}; };
let wasmer_function_idx = elf // Build up a mapping from a section to its relocation sections.
let reloc_sections = elf.shdr_relocs.iter().fold(
HashMap::new(),
|mut map: HashMap<_, Vec<_>>, (section_index, reloc_section)| {
let target_section = elf.section_headers[*section_index].sh_info as usize;
let target_section = ElfSectionIndex::from_usize(target_section).unwrap();
map.entry(target_section).or_default().push(reloc_section);
map
},
);
let mut visited: HashSet<ElfSectionIndex> = HashSet::new();
let mut worklist: Vec<ElfSectionIndex> = Vec::new();
let mut section_targets: HashMap<ElfSectionIndex, RelocationTarget> = HashMap::new();
let wasmer_function_index = elf
.section_headers .section_headers
.iter() .iter()
.enumerate() .enumerate()
.filter(|(_, section)| get_section_name(section) == Some("wasmer_function")) .filter(|(_, section)| get_section_name(section) == Some(".wasmer_function"))
.map(|(idx, _)| idx) .map(|(index, _)| index)
.take(1)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// TODO: handle errors here instead of asserting. if wasmer_function_index.len() != 1 {
assert!(wasmer_function_idx.len() == 1); return Err(CompileError::Codegen(format!(
let wasmer_function_idx = wasmer_function_idx[0]; "found {} sections named .wasmer_function",
wasmer_function_index.len()
)));
}
let wasmer_function_index = wasmer_function_index[0];
let wasmer_function_index = ElfSectionIndex::from_usize(wasmer_function_index)?;
let bytes = elf.section_headers[wasmer_function_idx].file_range(); let mut section_to_custom_section = HashMap::new();
let bytes = mem_buf_slice[bytes.start..bytes.end].to_vec();
let mut relocations = vec![]; section_targets.insert(
let mut local_relocations = vec![]; wasmer_function_index,
let mut required_custom_sections = HashMap::new(); RelocationTarget::LocalFunc(*local_func_index),
);
for (section_index, reloc_section) in &elf.shdr_relocs { let mut next_custom_section: u32 = 0;
let section_name = get_section_name(&elf.section_headers[*section_index]); let mut elf_section_to_target = |elf_section_index: ElfSectionIndex| {
if section_name != Some(".relawasmer_function") *section_targets.entry(elf_section_index).or_insert_with(|| {
&& section_name != Some(".relwasmer_function") let next = SectionIndex::from_u32(next_custom_section);
section_to_custom_section.insert(elf_section_index, next);
let target = RelocationTarget::CustomSection(next);
next_custom_section += 1;
target
})
};
let section_bytes = |elf_section_index: ElfSectionIndex| {
let elf_section_index = elf_section_index.as_usize();
let byte_range = elf.section_headers[elf_section_index].file_range();
mem_buf_slice[byte_range.start..byte_range.end].to_vec()
};
// From elf section index to list of Relocations. Although we use a Vec,
// the order of relocations is not important.
let mut relocations: HashMap<ElfSectionIndex, Vec<Relocation>> = HashMap::new();
// Each iteration of this loop pulls a section and the relocations
// that apply to it. We begin with the ".wasmer_function"
// section, and then parse all relocation sections that apply to that
// section. Those relocations may refer to additional sections which we
// then add to the worklist until we've visited the closure of
// everything needed to run the code in ".wasmer_function".
//
// `worklist` is the list of sections we have yet to visit. It never
// contains any duplicates or sections we've already visited. `visited`
// contains all the sections we've ever added to the worklist in a set
// so that we can quickly check whether a section is new before adding
// it to worklist. `section_to_custom_section` is filled in with all
// the sections we want to include.
worklist.push(wasmer_function_index);
visited.insert(wasmer_function_index);
while let Some(section_index) = worklist.pop() {
for reloc in reloc_sections
.get(&section_index)
.iter()
.flat_map(|inner| inner.iter().flat_map(|inner2| inner2.iter()))
{ {
continue;
}
for reloc in reloc_section.iter() {
let kind = match reloc.r_type { let kind = match reloc.r_type {
// TODO: these constants are not per-arch, we'll need to // TODO: these constants are not per-arch, we'll need to
// make the whole match per-arch. // make the whole match per-arch.
goblin::elf::reloc::R_X86_64_64 => RelocationKind::Abs8, goblin::elf::reloc::R_X86_64_64 => RelocationKind::Abs8,
_ => unimplemented!("unknown relocation {}", reloc.r_type), _ => {
return Err(CompileError::Codegen(format!(
"unknown ELF relocation {}",
reloc.r_type
)));
}
}; };
let offset = reloc.r_offset as u32; let offset = reloc.r_offset as u32;
let addend = reloc.r_addend.unwrap_or(0); let addend = reloc.r_addend.unwrap_or(0);
let target = reloc.r_sym; let target = reloc.r_sym;
// TODO: error handling // TODO: error handling
let target = elf.syms.get(target).unwrap(); let elf_target = elf.syms.get(target).unwrap();
if target.st_type() == goblin::elf::sym::STT_SECTION { let elf_target_section = ElfSectionIndex::from_usize(elf_target.st_shndx)?;
let len = required_custom_sections.len(); let reloc_target = if elf_target.st_type() == goblin::elf::sym::STT_SECTION {
let entry = required_custom_sections.entry(target.st_shndx); if visited.insert(elf_target_section) {
let local_section_index = *entry.or_insert(len) as _; worklist.push(elf_target_section);
local_relocations.push(LocalRelocation { }
kind, elf_section_to_target(elf_target_section)
local_section_index, } else if elf_target.st_type() == goblin::elf::sym::STT_FUNC
offset, && elf_target_section == wasmer_function_index
addend,
});
} else if target.st_type() == goblin::elf::sym::STT_FUNC
&& target.st_shndx == wasmer_function_idx
{ {
// This is a function referencing its own byte stream. // This is a function referencing its own byte stream.
relocations.push(Relocation { RelocationTarget::LocalFunc(*local_func_index)
kind, } else if elf_target.st_type() == goblin::elf::sym::STT_NOTYPE
reloc_target: RelocationTarget::LocalFunc(*local_func_index), && elf_target_section.is_undef()
offset,
addend,
});
} else if target.st_type() == goblin::elf::sym::STT_NOTYPE
&& target.st_shndx == goblin::elf::section_header::SHN_UNDEF as _
{ {
// Not defined in this .o file. Maybe another local function? // Not defined in this .o file. Maybe another local function?
let name = target.st_name; let name = elf_target.st_name;
let name = elf.strtab.get(name).unwrap().unwrap(); let name = elf.strtab.get(name).unwrap().unwrap();
if let Some((index, _)) = if let Some((index, _)) =
func_names.iter().find(|(_, func_name)| *func_name == name) func_names.iter().find(|(_, func_name)| *func_name == name)
@@ -402,69 +473,78 @@ impl FuncTranslator {
let local_index = wasm_module let local_index = wasm_module
.local_func_index(index) .local_func_index(index)
.expect("Relocation to non-local function"); .expect("Relocation to non-local function");
relocations.push(Relocation { RelocationTarget::LocalFunc(local_index)
kind,
reloc_target: RelocationTarget::LocalFunc(local_index),
offset,
addend,
});
// Maybe a libcall then? // Maybe a libcall then?
} else if let Some(libcall) = libcalls.get(name) { } else if let Some(libcall) = libcalls.get(name) {
relocations.push(Relocation { RelocationTarget::LibCall(*libcall)
kind,
reloc_target: RelocationTarget::LibCall(*libcall),
offset,
addend,
});
} else { } else {
unimplemented!("reference to unknown symbol {}", name); unimplemented!("reference to unknown symbol {}", name);
} }
} else { } else {
unimplemented!("unknown relocation {:?} with target {:?}", reloc, target); unimplemented!("unknown relocation {:?} with target {:?}", reloc, target);
} };
relocations
.entry(section_index)
.or_default()
.push(Relocation {
kind,
reloc_target,
offset,
addend,
});
} }
} }
let mut custom_sections = vec![]; let mut custom_sections = section_to_custom_section
custom_sections.resize( .iter()
required_custom_sections.len(), .map(|(elf_section_index, custom_section_index)| {
CustomSection { (
protection: CustomSectionProtection::Read, custom_section_index,
bytes: SectionBody::default(), CustomSection {
}, protection: CustomSectionProtection::Read,
); bytes: SectionBody::new_with_vec(section_bytes(*elf_section_index)),
for (section_idx, local_section_idx) in required_custom_sections { relocations: relocations
let bytes = elf.section_headers[section_idx as usize].file_range(); .remove_entry(elf_section_index)
let bytes = &mem_buf_slice[bytes.start..bytes.end]; .map_or(vec![], |(_, v)| v),
custom_sections[local_section_idx].bytes.extend(bytes); },
} )
})
.collect::<Vec<_>>();
custom_sections.sort_unstable_by_key(|a| a.0);
let custom_sections = custom_sections
.into_iter()
.map(|(_, v)| v)
.collect::<PrimaryMap<SectionIndex, _>>();
let function_body = FunctionBody {
body: section_bytes(wasmer_function_index),
unwind_info: None,
};
let address_map = FunctionAddressMap { let address_map = FunctionAddressMap {
instructions: vec![InstructionAddressMap { instructions: vec![InstructionAddressMap {
srcloc: SourceLoc::default(), srcloc: SourceLoc::default(),
code_offset: 0, code_offset: 0,
code_len: bytes.len(), code_len: function_body.body.len(),
}], }],
start_srcloc: SourceLoc::default(), start_srcloc: SourceLoc::default(),
end_srcloc: SourceLoc::default(), end_srcloc: SourceLoc::default(),
body_offset: 0, body_offset: 0,
body_len: bytes.len(), body_len: function_body.body.len(),
}; };
Ok(( Ok((
CompiledFunction { CompiledFunction {
body: FunctionBody { body: function_body,
body: bytes,
unwind_info: None,
},
jt_offsets: SecondaryMap::new(), jt_offsets: SecondaryMap::new(),
relocations, relocations: relocations
.remove_entry(&wasmer_function_index)
.map_or(vec![], |(_, v)| v),
frame_info: CompiledFunctionFrameInfo { frame_info: CompiledFunctionFrameInfo {
address_map, address_map,
traps: vec![], traps: vec![],
}, },
}, },
local_relocations,
custom_sections, custom_sections,
)) ))
} }
@@ -741,11 +821,14 @@ fn trap_if_not_representable_as_int<'ctx>(
builder.build_conditional_branch(out_of_bounds, failure_block, continue_block); builder.build_conditional_branch(out_of_bounds, failure_block, continue_block);
builder.position_at_end(failure_block); builder.position_at_end(failure_block);
builder.build_call( let is_nan = builder.build_float_compare(FloatPredicate::UNO, value, value, "is_nan");
intrinsics.throw_trap, let trap_code = builder.build_select(
&[intrinsics.trap_illegal_arithmetic], is_nan,
"throw", intrinsics.trap_bad_conversion_to_integer,
intrinsics.trap_illegal_arithmetic,
"",
); );
builder.build_call(intrinsics.throw_trap, &[trap_code], "throw");
builder.build_unreachable(); builder.build_unreachable();
builder.position_at_end(continue_block); builder.position_at_end(continue_block);
} }
@@ -772,13 +855,14 @@ fn trap_if_zero_or_overflow<'ctx>(
unreachable!() unreachable!()
}; };
let divisor_is_zero = builder.build_int_compare(
IntPredicate::EQ,
right,
int_type.const_int(0, false),
"divisor_is_zero",
);
let should_trap = builder.build_or( let should_trap = builder.build_or(
builder.build_int_compare( divisor_is_zero,
IntPredicate::EQ,
right,
int_type.const_int(0, false),
"divisor_is_zero",
),
builder.build_and( builder.build_and(
builder.build_int_compare(IntPredicate::EQ, left, min_value, "left_is_min"), builder.build_int_compare(IntPredicate::EQ, left, min_value, "left_is_min"),
builder.build_int_compare(IntPredicate::EQ, right, neg_one_value, "right_is_neg_one"), builder.build_int_compare(IntPredicate::EQ, right, neg_one_value, "right_is_neg_one"),
@@ -805,11 +889,13 @@ fn trap_if_zero_or_overflow<'ctx>(
let should_trap_block = context.append_basic_block(*function, "should_trap_block"); let should_trap_block = context.append_basic_block(*function, "should_trap_block");
builder.build_conditional_branch(should_trap, should_trap_block, shouldnt_trap_block); builder.build_conditional_branch(should_trap, should_trap_block, shouldnt_trap_block);
builder.position_at_end(should_trap_block); builder.position_at_end(should_trap_block);
builder.build_call( let trap_code = builder.build_select(
intrinsics.throw_trap, divisor_is_zero,
&[intrinsics.trap_illegal_arithmetic], intrinsics.trap_integer_division_by_zero,
"throw", intrinsics.trap_illegal_arithmetic,
"",
); );
builder.build_call(intrinsics.throw_trap, &[trap_code], "throw");
builder.build_unreachable(); builder.build_unreachable();
builder.position_at_end(shouldnt_trap_block); builder.position_at_end(shouldnt_trap_block);
} }
@@ -849,7 +935,7 @@ fn trap_if_zero<'ctx>(
builder.position_at_end(should_trap_block); builder.position_at_end(should_trap_block);
builder.build_call( builder.build_call(
intrinsics.throw_trap, intrinsics.throw_trap,
&[intrinsics.trap_illegal_arithmetic], &[intrinsics.trap_integer_division_by_zero],
"throw", "throw",
); );
builder.build_unreachable(); builder.build_unreachable();
@@ -1073,6 +1159,13 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let offset = self.vmoffsets.vmctx_vmmemory_import(memory_index); let offset = self.vmoffsets.vmctx_vmmemory_import(memory_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false); let offset = intrinsics.i32_ty.const_int(offset.into(), false);
let memory_definition_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") }; let memory_definition_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") };
let memory_definition_ptr_ptr = builder
.build_bitcast(
memory_definition_ptr_ptr,
intrinsics.i8_ptr_ty.ptr_type(AddressSpace::Generic),
"",
)
.into_pointer_value();
builder builder
.build_load(memory_definition_ptr_ptr, "") .build_load(memory_definition_ptr_ptr, "")
.into_pointer_value() .into_pointer_value()
@@ -1102,71 +1195,28 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
// Compute the offset over the memory_base. // Compute the offset over the memory_base.
let imm_offset = intrinsics.i64_ty.const_int(memarg.offset as u64, false); let imm_offset = intrinsics.i64_ty.const_int(memarg.offset as u64, false);
//let var_offset_i32 = self.state.pop1()?.into_int_value();
let var_offset = builder.build_int_z_extend(var_offset, intrinsics.i64_ty, ""); let var_offset = builder.build_int_z_extend(var_offset, intrinsics.i64_ty, "");
let offset = builder.build_int_add(var_offset, imm_offset, ""); let offset = builder.build_int_add(var_offset, imm_offset, "");
// TODO: must bounds check here or before this point (if applicable)
let value_ptr = unsafe { builder.build_gep(base, &[offset], "") }; let value_ptr = unsafe { builder.build_gep(base, &[offset], "") };
Ok(builder match memory_plans[memory_index] {
.build_bitcast(value_ptr, ptr_ty, "") MemoryPlan {
.into_pointer_value()) style: MemoryStyle::Dynamic,
/* offset_guard_size: _,
let memory_cache = ctx.memory(MemoryIndex::from_u32(0), intrinsics, module, &memory_plans); memory:
let (mem_base, mem_bound, minimum, _maximum) = match memory_cache { MemoryType {
MemoryCache::Dynamic { minimum,
ptr_to_base_ptr, maximum,
ptr_to_bounds, shared: _,
minimum, },
maximum, } => {
} => {
let base = builder
.build_load(ptr_to_base_ptr, "base")
.into_pointer_value();
let bounds = builder.build_load(ptr_to_bounds, "bounds").into_int_value();
tbaa_label(
&module,
intrinsics,
"dynamic_memory_base",
base.as_instruction_value().unwrap(),
Some(0),
);
tbaa_label(
&module,
intrinsics,
"dynamic_memory_bounds",
bounds.as_instruction_value().unwrap(),
Some(0),
);
(base, bounds, minimum, maximum)
}
MemoryCache::Static {
base_ptr,
bounds,
minimum,
maximum,
} => (base_ptr, bounds, minimum, maximum),
};
let mem_base = builder
.build_bitcast(mem_base, intrinsics.i8_ptr_ty, &state.var_name())
.into_pointer_value();
// Compute the offset over the memory_base.
let imm_offset = intrinsics.i64_ty.const_int(memarg.offset as u64, false);
let var_offset_i32 = state.pop1()?.into_int_value();
let var_offset =
builder.build_int_z_extend(var_offset_i32, intrinsics.i64_ty, &state.var_name());
let effective_offset = builder.build_int_add(var_offset, imm_offset, &state.var_name());
if let MemoryCache::Dynamic { .. } = memory_cache {
// If the memory is dynamic, do a bounds check. For static we rely on // If the memory is dynamic, do a bounds check. For static we rely on
// the size being a multiple of the page size and hitting a guard page. // the size being a multiple of the page size and hitting a guard page.
let value_size_v = intrinsics.i64_ty.const_int(value_size as u64, false); let value_size_v = intrinsics.i64_ty.const_int(value_size as u64, false);
let ptr_in_bounds = if effective_offset.is_const() { let ptr_in_bounds = if offset.is_const() {
let load_offset_end = effective_offset.const_add(value_size_v); let load_offset_end = offset.const_add(value_size_v);
let ptr_in_bounds = load_offset_end.const_int_compare( let ptr_in_bounds = load_offset_end.const_int_compare(
IntPredicate::ULE, IntPredicate::ULE,
// TODO: Pages to bytes conversion here
intrinsics.i64_ty.const_int(minimum.bytes().0 as u64, false), intrinsics.i64_ty.const_int(minimum.bytes().0 as u64, false),
); );
if ptr_in_bounds.get_zero_extended_constant() == Some(1) { if ptr_in_bounds.get_zero_extended_constant() == Some(1) {
@@ -1178,14 +1228,16 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
None None
} }
.unwrap_or_else(|| { .unwrap_or_else(|| {
let load_offset_end = let load_offset_end = builder.build_int_add(offset, value_size_v, "");
builder.build_int_add(effective_offset, value_size_v, &state.var_name());
let current_length =
builder.build_load(current_length_ptr, "").into_int_value();
builder.build_int_compare( builder.build_int_compare(
IntPredicate::ULE, IntPredicate::ULE,
load_offset_end, load_offset_end,
mem_bound, current_length,
&state.var_name(), "",
) )
}); });
if !ptr_in_bounds.is_constant_int() if !ptr_in_bounds.is_constant_int()
@@ -1212,7 +1264,8 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let in_bounds_continue_block = let in_bounds_continue_block =
context.append_basic_block(*function, "in_bounds_continue_block"); context.append_basic_block(*function, "in_bounds_continue_block");
let not_in_bounds_block = context.append_basic_block(*function, "not_in_bounds_block"); let not_in_bounds_block =
context.append_basic_block(*function, "not_in_bounds_block");
builder.build_conditional_branch( builder.build_conditional_branch(
ptr_in_bounds, ptr_in_bounds,
in_bounds_continue_block, in_bounds_continue_block,
@@ -1228,12 +1281,19 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
builder.position_at_end(in_bounds_continue_block); builder.position_at_end(in_bounds_continue_block);
} }
} }
MemoryPlan {
style: MemoryStyle::Static { bound: _ },
offset_guard_size: _,
memory: _,
} => {
// No bounds checks of any kind! Out of bounds memory accesses
// will hit the guard pages.
}
};
let ptr = unsafe { builder.build_gep(mem_base, &[effective_offset], &state.var_name()) }; Ok(builder
Ok(builder .build_bitcast(value_ptr, ptr_ty, "")
.build_bitcast(ptr, ptr_ty, &state.var_name()) .into_pointer_value())
.into_pointer_value())
*/
} }
} }
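
The dynamic-memory branch above emits the classic "offset + size must not pass the current length" comparison; static memories skip it and rely on guard pages. Schematically, in plain Rust (the real check is of course built as LLVM IR):

fn access_in_bounds(offset: u64, value_size: u64, current_length: u64) -> bool {
    match offset.checked_add(value_size) {
        Some(end) => end <= current_length, // mirrors the ULE comparison
        None => false,                      // address arithmetic wrapped
    }
}
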
@@ -1440,6 +1500,7 @@ pub struct LLVMFunctionCodeGenerator<'ctx, 'a> {
module: &'a Module<'ctx>, module: &'a Module<'ctx>,
vmoffsets: VMOffsets, vmoffsets: VMOffsets,
wasm_module: &'a WasmerCompilerModule, wasm_module: &'a WasmerCompilerModule,
func_names: &'a SecondaryMap<FunctionIndex, String>,
} }
impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> { impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
@@ -2166,93 +2227,34 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
Operator::GlobalGet { global_index } => { Operator::GlobalGet { global_index } => {
let global_index = GlobalIndex::from_u32(global_index); let global_index = GlobalIndex::from_u32(global_index);
let global_type = module.globals[global_index]; match ctx.global(global_index, intrinsics) {
let global_value_type = global_type.ty; GlobalCache::Const { value } => {
self.state.push1(value);
// TODO: cache loads of const globals. }
let _global_mutability = global_type.mutability; GlobalCache::Mut { ptr_to_value } => {
let value = builder.build_load(ptr_to_value, "");
let global_ptr = // TODO: tbaa
if let Some(local_global_index) = module.local_global_index(global_index) { self.state.push1(value);
let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index); }
let offset = intrinsics.i32_ty.const_int(offset.into(), false); }
unsafe { builder.build_gep(*vmctx, &[offset], "") }
} else {
let offset = self.vmoffsets.vmctx_vmglobal_import(global_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
let global_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") };
let global_ptr_ptr = builder
.build_bitcast(global_ptr_ptr, intrinsics.i8_ptr_ty, "")
.into_pointer_value();
builder.build_load(global_ptr_ptr, "").into_pointer_value()
};
let global_ptr = builder
.build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value();
let value = builder.build_load(global_ptr, "");
// TODO: add TBAA info.
self.state.push1(value);
} }
Operator::GlobalSet { global_index } => { Operator::GlobalSet { global_index } => {
let global_index = GlobalIndex::from_u32(global_index); let global_index = GlobalIndex::from_u32(global_index);
let global_type = module.globals[global_index]; match ctx.global(global_index, intrinsics) {
let global_value_type = global_type.ty; GlobalCache::Const { value } => {
return Err(CompileError::Codegen(format!(
// Note that we don't check mutability, assuming that's already "global.set on immutable global index {}",
// been checked by some other verifier. global_index.as_u32()
)))
let global_ptr = }
if let Some(local_global_index) = module.local_global_index(global_index) { GlobalCache::Mut { ptr_to_value } => {
let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index); let (value, info) = self.state.pop1_extra()?;
let offset = intrinsics.i32_ty.const_int(offset.into(), false); let value =
unsafe { builder.build_gep(*vmctx, &[offset], "") } apply_pending_canonicalization(builder, intrinsics, value, info);
} else { builder.build_store(ptr_to_value, value);
let offset = self.vmoffsets.vmctx_vmglobal_import(global_index); // TODO: tbaa
let offset = intrinsics.i32_ty.const_int(offset.into(), false); }
let global_ptr_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") }; }
let global_ptr_ptr = builder
.build_bitcast(global_ptr_ptr, intrinsics.i8_ptr_ty, "")
.into_pointer_value();
builder.build_load(global_ptr_ptr, "").into_pointer_value()
};
let global_ptr = builder
.build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value();
let (value, info) = self.state.pop1_extra()?;
let value = apply_pending_canonicalization(builder, intrinsics, value, info);
builder.build_store(global_ptr, value);
// TODO: add TBAA info
/*
let (value, info) = self.state.pop1_extra()?;
let value = apply_pending_canonicalization(builder, intrinsics, value, info);
let index = GlobalIndex::from_u32(global_index);
let global_cache = ctx.global_cache(index, intrinsics, self.module);
match global_cache {
GlobalCache::Mut { ptr_to_value } => {
let store = builder.build_store(ptr_to_value, value);
tbaa_label(
&self.module,
intrinsics,
"global",
store,
Some(global_index),
);
}
GlobalCache::Const { value: _ } => {
return Err(CompileError::Codegen("global is immutable".to_string()));
}
}
*/
} }
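Both operators now go through the per-function global cache: a constant global folds to its cached value with no load at all, while a mutable global caches only the pointer. A sketch of that contract, with u64 standing in for an LLVM value and a raw pointer for ptr_to_value (illustrative, not the compiler's types):

#[derive(Clone, Copy)]
enum GlobalCache {
    Const { value: u64 },           // immutable global: the value itself is cached
    Mut { ptr_to_value: *mut u64 }, // mutable global: only the pointer is cached
}

fn global_get(cache: GlobalCache) -> u64 {
    match cache {
        GlobalCache::Const { value } => value, // no load emitted
        GlobalCache::Mut { ptr_to_value } => unsafe { *ptr_to_value },
    }
}

fn global_set(cache: GlobalCache, v: u64) -> Result<(), String> {
    match cache {
        // global.set on an immutable global is a codegen error, as above.
        GlobalCache::Const { .. } => Err("global.set on immutable global".to_string()),
        GlobalCache::Mut { ptr_to_value } => {
            unsafe { *ptr_to_value = v };
            Ok(())
        }
    }
}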
Operator::Select => { Operator::Select => {
@@ -2299,22 +2301,60 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
} }
Operator::Call { function_index } => { Operator::Call { function_index } => {
let func_index = FunctionIndex::from_u32(function_index); let func_index = FunctionIndex::from_u32(function_index);
let sigindex = module.functions.get(func_index).unwrap(); let sigindex = &module.functions[func_index];
let func_type = module.signatures.get(*sigindex).unwrap(); let func_type = &module.signatures[*sigindex];
let func_name = module.func_names.get(&func_index).unwrap(); let func_name = &self.func_names[func_index];
let llvm_func_type = func_type_to_llvm(&self.context, &intrinsics, func_type); let llvm_func_type = func_type_to_llvm(&self.context, &intrinsics, func_type);
let func = self.module.get_function(func_name); let (func, callee_vmctx) = if let Some(local_func_index) =
// TODO: we could do this by comparing function indices instead module.local_func_index(func_index)
// of going through LLVM APIs and string comparisons. {
let func = if func.is_none() { // TODO: we could do this by comparing function indices instead
self.module // of going through LLVM APIs and string comparisons.
.add_function(func_name, llvm_func_type, Some(Linkage::External)) let func = self.module.get_function(func_name);
let func = if func.is_none() {
self.module
.add_function(func_name, llvm_func_type, Some(Linkage::External))
} else {
func.unwrap()
};
(func.as_global_value().as_pointer_value(), ctx.basic())
} else { } else {
func.unwrap() let offset = self.vmoffsets.vmctx_vmfunction_import(func_index);
let offset = intrinsics.i32_ty.const_int(offset.into(), false);
let vmfunction_import_ptr = unsafe { builder.build_gep(*vmctx, &[offset], "") };
let vmfunction_import_ptr = builder
.build_bitcast(
vmfunction_import_ptr,
intrinsics.vmfunction_import_ptr_ty,
"",
)
.into_pointer_value();
let body_ptr_ptr = builder
.build_struct_gep(
vmfunction_import_ptr,
intrinsics.vmfunction_import_body_element,
"",
)
.unwrap();
let body_ptr = builder.build_load(body_ptr_ptr, "");
let body_ptr = builder
.build_bitcast(body_ptr, llvm_func_type.ptr_type(AddressSpace::Generic), "")
.into_pointer_value();
let vmctx_ptr_ptr = builder
.build_struct_gep(
vmfunction_import_ptr,
intrinsics.vmfunction_import_vmctx_element,
"",
)
.unwrap();
let vmctx_ptr = builder.build_load(vmctx_ptr_ptr, "");
(body_ptr, vmctx_ptr)
}; };
let params: Vec<_> = std::iter::once(ctx.basic()) let params: Vec<_> = std::iter::repeat(callee_vmctx)
.take(2)
.chain( .chain(
self.state self.state
.peekn_extra(func_type.params().len())? .peekn_extra(func_type.params().len())?
@@ -2399,7 +2439,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
} }
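Local functions are called directly with the caller's context, while imported functions are reached through a two-field import record in the vmctx (body at element 0, vmctx at element 1, matching the element indices registered in the intrinsics below). A sketch of the resolution step, under those layout assumptions:

// Assumed two-field import record; the authoritative definition lives in
// wasmer-runtime's VMFunctionImport.
#[repr(C)]
struct VMFunctionImport {
    body: *const u8, // address of the callee's compiled body
    vmctx: *mut u8,  // the callee's own context pointer
}

// Returns (code pointer, vmctx to pass). The lowered signature reserves two
// leading context-pointer slots; this commit fills both with the resolved
// callee vmctx (`repeat(callee_vmctx).take(2)`).
unsafe fn resolve_call(
    local_body: Option<*const u8>,   // Some(..) for locally defined functions
    import: *const VMFunctionImport, // used when the function is imported
    caller_vmctx: *mut u8,
) -> (*const u8, *mut u8) {
    match local_body {
        Some(body) => (body, caller_vmctx),
        None => ((*import).body, (*import).vmctx),
    }
}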
Operator::CallIndirect { index, table_index } => { Operator::CallIndirect { index, table_index } => {
let sigindex = SignatureIndex::from_u32(index); let sigindex = SignatureIndex::from_u32(index);
let func_type = module.signatures.get(sigindex).unwrap(); let func_type = &module.signatures[sigindex];
let expected_dynamic_sigindex = ctx.dynamic_sigindex(sigindex, intrinsics); let expected_dynamic_sigindex = ctx.dynamic_sigindex(sigindex, intrinsics);
let (table_base, table_bound) = ctx.table( let (table_base, table_bound) = ctx.table(
TableIndex::from_u32(table_index), TableIndex::from_u32(table_index),
@@ -2425,7 +2465,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
}; };
// Load things from the anyfunc data structure. // Load things from the anyfunc data structure.
let (func_ptr, ctx_ptr, found_dynamic_sigindex) = unsafe { let (func_ptr, found_dynamic_sigindex, ctx_ptr) = unsafe {
( (
builder builder
.build_load( .build_load(
@@ -2435,20 +2475,20 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
"func_ptr", "func_ptr",
) )
.into_pointer_value(), .into_pointer_value(),
builder.build_load(
builder
.build_struct_gep(anyfunc_struct_ptr, 1, "ctx_ptr_ptr")
.unwrap(),
"ctx_ptr",
),
builder builder
.build_load( .build_load(
builder builder
.build_struct_gep(anyfunc_struct_ptr, 2, "sigindex_ptr") .build_struct_gep(anyfunc_struct_ptr, 1, "sigindex_ptr")
.unwrap(), .unwrap(),
"sigindex", "sigindex",
) )
.into_int_value(), .into_int_value(),
builder.build_load(
builder
.build_struct_gep(anyfunc_struct_ptr, 2, "ctx_ptr_ptr")
.unwrap(),
"ctx_ptr",
),
) )
}; };
@@ -2492,12 +2532,16 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
builder.position_at_end(not_in_bounds_block); builder.position_at_end(not_in_bounds_block);
builder.build_call( builder.build_call(
intrinsics.throw_trap, intrinsics.throw_trap,
&[intrinsics.trap_call_indirect_oob], &[intrinsics.trap_table_access_oob],
"throw", "throw",
); );
builder.build_unreachable(); builder.build_unreachable();
builder.position_at_end(in_bounds_continue_block); builder.position_at_end(in_bounds_continue_block);
// Next, check if the table element is initialized.
let elem_initialized = builder.build_is_not_null(func_ptr, "");
// Next, check if the signature id is correct. // Next, check if the signature id is correct.
let sigindices_equal = builder.build_int_compare( let sigindices_equal = builder.build_int_compare(
@@ -2507,15 +2551,18 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
"sigindices_equal", "sigindices_equal",
); );
let initialized_and_sigindices_match =
builder.build_and(elem_initialized, sigindices_equal, "");
// Tell llvm that `expected_dynamic_sigindex` should equal `found_dynamic_sigindex`. // Tell llvm that `expected_dynamic_sigindex` should equal `found_dynamic_sigindex`.
let sigindices_equal = builder let initialized_and_sigindices_match = builder
.build_call( .build_call(
intrinsics.expect_i1, intrinsics.expect_i1,
&[ &[
sigindices_equal.as_basic_value_enum(), initialized_and_sigindices_match.as_basic_value_enum(),
intrinsics.i1_ty.const_int(1, false).as_basic_value_enum(), intrinsics.i1_ty.const_int(1, false).as_basic_value_enum(),
], ],
"sigindices_equal_expect", "initialized_and_sigindices_match_expect",
) )
.try_as_basic_value() .try_as_basic_value()
.left() .left()
@@ -2526,17 +2573,19 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let sigindices_notequal_block = let sigindices_notequal_block =
context.append_basic_block(function, "sigindices_notequal_block"); context.append_basic_block(function, "sigindices_notequal_block");
builder.build_conditional_branch( builder.build_conditional_branch(
sigindices_equal, initialized_and_sigindices_match,
continue_block, continue_block,
sigindices_notequal_block, sigindices_notequal_block,
); );
builder.position_at_end(sigindices_notequal_block); builder.position_at_end(sigindices_notequal_block);
builder.build_call( let trap_code = builder.build_select(
intrinsics.throw_trap, elem_initialized,
&[intrinsics.trap_call_indirect_sig], intrinsics.trap_call_indirect_sig,
"throw", intrinsics.trap_call_indirect_null,
"",
); );
builder.build_call(intrinsics.throw_trap, &[trap_code], "throw");
builder.build_unreachable(); builder.build_unreachable();
builder.position_at_end(continue_block); builder.position_at_end(continue_block);
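The indirect-call path now folds the null check into the signature check and only afterwards selects which trap to raise. The same logic in plain Rust, over a simplified table entry (trap names are illustrative):

#[derive(Debug, PartialEq)]
enum Trap {
    TableAccessOutOfBounds,
    IndirectCallToNull,
    BadSignature,
}

fn check_indirect_call(
    table: &[(usize /* func ptr; 0 = uninitialized */, u32 /* sigindex */)],
    elem: usize,
    expected_sig: u32,
) -> Result<usize, Trap> {
    // 1. Bounds check against the table length.
    let &(func_ptr, found_sig) = table.get(elem).ok_or(Trap::TableAccessOutOfBounds)?;
    // 2. Combined fast-path check: element initialized AND signatures match.
    if func_ptr != 0 && found_sig == expected_sig {
        Ok(func_ptr)
    } else if func_ptr == 0 {
        // Uninitialized element: select the null-call trap.
        Err(Trap::IndirectCallToNull)
    } else {
        Err(Trap::BadSignature)
    }
}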
@@ -2544,7 +2593,8 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let pushed_args = self.state.popn_save_extra(func_type.params().len())?; let pushed_args = self.state.popn_save_extra(func_type.params().len())?;
let args: Vec<_> = std::iter::once(ctx_ptr) let args: Vec<_> = std::iter::repeat(ctx_ptr)
.take(2)
.chain(pushed_args.into_iter().enumerate().map(|(i, (v, info))| { .chain(pushed_args.into_iter().enumerate().map(|(i, (v, info))| {
match func_type.params()[i] { match func_type.params()[i] {
Type::F32 => builder.build_bitcast( Type::F32 => builder.build_bitcast(

View File

@@ -143,11 +143,14 @@ pub struct Intrinsics<'ctx> {
pub f64x2_zero: VectorValue<'ctx>, pub f64x2_zero: VectorValue<'ctx>,
pub trap_unreachable: BasicValueEnum<'ctx>, pub trap_unreachable: BasicValueEnum<'ctx>,
pub trap_call_indirect_null: BasicValueEnum<'ctx>,
pub trap_call_indirect_sig: BasicValueEnum<'ctx>, pub trap_call_indirect_sig: BasicValueEnum<'ctx>,
pub trap_call_indirect_oob: BasicValueEnum<'ctx>,
pub trap_memory_oob: BasicValueEnum<'ctx>, pub trap_memory_oob: BasicValueEnum<'ctx>,
pub trap_illegal_arithmetic: BasicValueEnum<'ctx>, pub trap_illegal_arithmetic: BasicValueEnum<'ctx>,
pub trap_integer_division_by_zero: BasicValueEnum<'ctx>,
pub trap_bad_conversion_to_integer: BasicValueEnum<'ctx>,
pub trap_misaligned_atomic: BasicValueEnum<'ctx>, pub trap_misaligned_atomic: BasicValueEnum<'ctx>,
pub trap_table_access_oob: BasicValueEnum<'ctx>,
// VM intrinsics. // VM intrinsics.
pub memory_grow_dynamic_local: FunctionValue<'ctx>, pub memory_grow_dynamic_local: FunctionValue<'ctx>,
@@ -169,6 +172,10 @@ pub struct Intrinsics<'ctx> {
pub experimental_stackmap: FunctionValue<'ctx>, pub experimental_stackmap: FunctionValue<'ctx>,
pub vmfunction_import_ptr_ty: PointerType<'ctx>,
pub vmfunction_import_body_element: u32,
pub vmfunction_import_vmctx_element: u32,
pub vmmemory_definition_ptr_ty: PointerType<'ctx>, pub vmmemory_definition_ptr_ty: PointerType<'ctx>,
pub vmmemory_definition_base_element: u32, pub vmmemory_definition_base_element: u32,
pub vmmemory_definition_current_length_element: u32, pub vmmemory_definition_current_length_element: u32,
@@ -258,8 +265,8 @@ impl<'ctx> Intrinsics<'ctx> {
let anyfunc_ty = context.struct_type( let anyfunc_ty = context.struct_type(
&[ &[
i8_ptr_ty_basic, i8_ptr_ty_basic,
ctx_ptr_ty.as_basic_type_enum(),
sigindex_ty.as_basic_type_enum(), sigindex_ty.as_basic_type_enum(),
ctx_ptr_ty.as_basic_type_enum(),
], ],
false, false,
); );
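The element order of the anyfunc struct changes here: the signature index moves to element 1 and the context pointer to element 2, matching the struct GEPs in the call_indirect lowering above. As a rough runtime-side picture (illustrative only; the authoritative layout lives in wasmer-runtime's caller-checked anyfunc type):

#[repr(C)]
struct Anyfunc {
    func_ptr: *const u8, // element 0: code pointer
    type_index: u32,     // element 1: dynamic signature index
    vmctx: *mut u8,      // element 2: callee context
}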
@@ -475,12 +482,12 @@ impl<'ctx> Intrinsics<'ctx> {
trap_unreachable: i32_ty trap_unreachable: i32_ty
.const_int(TrapCode::UnreachableCodeReached as _, false) .const_int(TrapCode::UnreachableCodeReached as _, false)
.as_basic_value_enum(), .as_basic_value_enum(),
trap_call_indirect_null: i32_ty
.const_int(TrapCode::IndirectCallToNull as _, false)
.as_basic_value_enum(),
trap_call_indirect_sig: i32_ty trap_call_indirect_sig: i32_ty
.const_int(TrapCode::BadSignature as _, false) .const_int(TrapCode::BadSignature as _, false)
.as_basic_value_enum(), .as_basic_value_enum(),
trap_call_indirect_oob: i32_ty
.const_int(TrapCode::OutOfBounds as _, false)
.as_basic_value_enum(),
trap_memory_oob: i32_ty trap_memory_oob: i32_ty
.const_int(TrapCode::OutOfBounds as _, false) .const_int(TrapCode::OutOfBounds as _, false)
.as_basic_value_enum(), .as_basic_value_enum(),
@@ -488,10 +495,19 @@ impl<'ctx> Intrinsics<'ctx> {
trap_illegal_arithmetic: i32_ty trap_illegal_arithmetic: i32_ty
.const_int(TrapCode::IntegerOverflow as _, false) .const_int(TrapCode::IntegerOverflow as _, false)
.as_basic_value_enum(), .as_basic_value_enum(),
trap_integer_division_by_zero: i32_ty
.const_int(TrapCode::IntegerDivisionByZero as _, false)
.as_basic_value_enum(),
trap_bad_conversion_to_integer: i32_ty
.const_int(TrapCode::BadConversionToInteger as _, false)
.as_basic_value_enum(),
// TODO: add misaligned atomic traps to wasmer runtime // TODO: add misaligned atomic traps to wasmer runtime
trap_misaligned_atomic: i32_ty trap_misaligned_atomic: i32_ty
.const_int(TrapCode::Interrupt as _, false) .const_int(TrapCode::Interrupt as _, false)
.as_basic_value_enum(), .as_basic_value_enum(),
trap_table_access_oob: i32_ty
.const_int(TrapCode::TableAccessOutOfBounds as _, false)
.as_basic_value_enum(),
// VM intrinsics. // VM intrinsics.
memory_grow_dynamic_local: module.add_function( memory_grow_dynamic_local: module.add_function(
@@ -577,6 +593,12 @@ impl<'ctx> Intrinsics<'ctx> {
None, None,
), ),
vmfunction_import_ptr_ty: context
.struct_type(&[i8_ptr_ty_basic, i8_ptr_ty_basic], false)
.ptr_type(AddressSpace::Generic),
vmfunction_import_body_element: 0,
vmfunction_import_vmctx_element: 1,
// TODO: this i64 is actually a rust usize // TODO: this i64 is actually a rust usize
vmmemory_definition_ptr_ty: context vmmemory_definition_ptr_ty: context
.struct_type(&[i8_ptr_ty_basic, i64_ptr_ty_basic], false) .struct_type(&[i8_ptr_ty_basic, i64_ptr_ty_basic], false)
@@ -888,9 +910,9 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
}) })
} }
pub fn table_prepare( fn table_prepare(
&mut self, &mut self,
index: TableIndex, table_index: TableIndex,
intrinsics: &Intrinsics<'ctx>, intrinsics: &Intrinsics<'ctx>,
module: &Module<'ctx>, module: &Module<'ctx>,
) -> (PointerValue<'ctx>, PointerValue<'ctx>) { ) -> (PointerValue<'ctx>, PointerValue<'ctx>) {
@@ -904,74 +926,77 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
let TableCache { let TableCache {
ptr_to_base_ptr, ptr_to_base_ptr,
ptr_to_bounds, ptr_to_bounds,
} = *cached_tables.entry(index).or_insert_with(|| { } = *cached_tables.entry(table_index).or_insert_with(|| {
let (table_array_ptr_ptr, index, field_name) = let (ptr_to_base_ptr, ptr_to_bounds) =
if let Some(local_table_index) = wasm_module.local_table_index(index) { if let Some(local_table_index) = wasm_module.local_table_index(table_index) {
( let offset = intrinsics.i64_ty.const_int(
unsafe { offsets
cache_builder .vmctx_vmtable_definition_base(local_table_index)
.build_struct_gep( .into(),
ctx_ptr_value, false,
offset_to_index(offsets.vmctx_tables_begin()), );
"table_array_ptr_ptr", let ptr_to_base_ptr =
) unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
.unwrap() let ptr_to_base_ptr = cache_builder
}, .build_bitcast(
local_table_index.index() as u64, ptr_to_base_ptr,
"context_field_ptr_to_local_table", intrinsics.i8_ptr_ty.ptr_type(AddressSpace::Generic),
) "",
)
.into_pointer_value();
let offset = intrinsics.i64_ty.const_int(
offsets
.vmctx_vmtable_definition_current_elements(local_table_index)
.into(),
false,
);
let ptr_to_bounds =
unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
let ptr_to_bounds = cache_builder
.build_bitcast(ptr_to_bounds, intrinsics.i32_ptr_ty, "")
.into_pointer_value();
(ptr_to_base_ptr, ptr_to_bounds)
} else { } else {
( let offset = intrinsics.i64_ty.const_int(
unsafe { offsets.vmctx_vmtable_import_definition(table_index).into(),
cache_builder false,
.build_struct_gep( );
ctx_ptr_value, let definition_ptr_ptr =
offset_to_index(offsets.vmctx_imported_tables_begin()), unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
"table_array_ptr_ptr", let definition_ptr_ptr = cache_builder
) .build_bitcast(
.unwrap() definition_ptr_ptr,
}, intrinsics.i8_ptr_ty.ptr_type(AddressSpace::Generic),
index.index() as u64, "",
"context_field_ptr_to_import_table", )
) .into_pointer_value();
let definition_ptr = cache_builder
.build_load(definition_ptr_ptr, "")
.into_pointer_value();
// TODO: TBAA label
let offset = intrinsics
.i64_ty
.const_int(offsets.vmtable_definition_base().into(), false);
let ptr_to_base_ptr =
unsafe { cache_builder.build_gep(definition_ptr, &[offset], "") };
let ptr_to_base_ptr = cache_builder
.build_bitcast(
ptr_to_base_ptr,
intrinsics.i8_ptr_ty.ptr_type(AddressSpace::Generic),
"",
)
.into_pointer_value();
let offset = intrinsics
.i64_ty
.const_int(offsets.vmtable_definition_current_elements().into(), false);
let ptr_to_bounds =
unsafe { cache_builder.build_gep(definition_ptr, &[offset], "") };
let ptr_to_bounds = cache_builder
.build_bitcast(ptr_to_bounds, intrinsics.i32_ptr_ty, "")
.into_pointer_value();
(ptr_to_base_ptr, ptr_to_bounds)
}; };
let table_array_ptr = cache_builder
.build_load(table_array_ptr_ptr, "table_array_ptr")
.into_pointer_value();
tbaa_label(
module,
intrinsics,
field_name,
table_array_ptr.as_instruction_value().unwrap(),
None,
);
let const_index = intrinsics.i32_ty.const_int(index, false);
let table_ptr_ptr = unsafe {
cache_builder.build_in_bounds_gep(table_array_ptr, &[const_index], "table_ptr_ptr")
};
let table_ptr = cache_builder
.build_load(table_ptr_ptr, "table_ptr")
.into_pointer_value();
tbaa_label(
module,
intrinsics,
"table_ptr",
table_array_ptr.as_instruction_value().unwrap(),
Some(index as u32),
);
let (ptr_to_base_ptr, ptr_to_bounds) = unsafe {
(
cache_builder
.build_struct_gep(table_ptr, 0, "base_ptr")
.unwrap(),
cache_builder
.build_struct_gep(table_ptr, 1, "bounds_ptr")
.unwrap(),
)
};
TableCache { TableCache {
ptr_to_base_ptr, ptr_to_base_ptr,
ptr_to_bounds, ptr_to_bounds,
@@ -989,10 +1014,14 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
builder: &Builder<'ctx>, builder: &Builder<'ctx>,
) -> (PointerValue<'ctx>, IntValue<'ctx>) { ) -> (PointerValue<'ctx>, IntValue<'ctx>) {
let (ptr_to_base_ptr, ptr_to_bounds) = self.table_prepare(index, intrinsics, module); let (ptr_to_base_ptr, ptr_to_bounds) = self.table_prepare(index, intrinsics, module);
let base_ptr = builder let base_ptr = self
.cache_builder
.build_load(ptr_to_base_ptr, "base_ptr") .build_load(ptr_to_base_ptr, "base_ptr")
.into_pointer_value(); .into_pointer_value();
let bounds = builder.build_load(ptr_to_bounds, "bounds").into_int_value(); let bounds = self
.cache_builder
.build_load(ptr_to_bounds, "bounds")
.into_int_value();
tbaa_label( tbaa_label(
module, module,
intrinsics, intrinsics,
@@ -1059,106 +1088,59 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
}) })
} }
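table_prepare now reads the base and bounds pointers straight from vmctx offsets, with imported tables needing one extra pointer load to reach their definition. A sketch of that shape, with raw pointers standing in for the cache_builder GEP/load sequence (simplified types, not the runtime's):

#[repr(C)]
struct VMTableDefinition {
    base: *mut u8,         // what vmctx_vmtable_definition_base points at
    current_elements: u32, // the bounds used by the call_indirect check
}

unsafe fn table_base_and_bounds(
    local: Option<*mut VMTableDefinition>,   // definition embedded in vmctx
    imported: *const *mut VMTableDefinition, // vmctx slot holding a pointer
) -> (*mut *mut u8, *mut u32) {
    // Imported tables need one extra load to reach the definition.
    let def = match local {
        Some(def) => def,
        None => *imported,
    };
    (&mut (*def).base, &mut (*def).current_elements)
}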
pub fn global_cache( pub fn global(
&mut self, &mut self,
index: GlobalIndex, index: GlobalIndex,
intrinsics: &Intrinsics<'ctx>, intrinsics: &Intrinsics<'ctx>,
module: &Module<'ctx>,
) -> GlobalCache<'ctx> { ) -> GlobalCache<'ctx> {
let (cached_globals, ctx_ptr_value, wasm_module, cache_builder, offsets) = ( let (cached_globals, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
&mut self.cached_globals, &mut self.cached_globals,
self.ctx_ptr_value,
self.wasm_module, self.wasm_module,
self.ctx_ptr_value,
&self.cache_builder, &self.cache_builder,
&self.offsets, &self.offsets,
); );
*cached_globals.entry(index).or_insert_with(|| { *cached_globals.entry(index).or_insert_with(|| {
let (globals_array_ptr_ptr, index, mutable, wasmer_ty, field_name) = { let global_type = wasm_module.globals[index];
let desc = wasm_module.globals.get(index).unwrap(); let global_value_type = global_type.ty;
if let Some(_local_global_index) = wasm_module.local_global_index(index) {
(
unsafe {
cache_builder
.build_struct_gep(
ctx_ptr_value,
offset_to_index(offsets.vmctx_globals_begin()),
"globals_array_ptr_ptr",
)
.unwrap()
},
index.index() as u64,
desc.mutability,
desc.ty,
"context_field_ptr_to_local_globals",
)
} else {
(
unsafe {
cache_builder
.build_struct_gep(
ctx_ptr_value,
offset_to_index(offsets.vmctx_imported_globals_begin()),
"globals_array_ptr_ptr",
)
.unwrap()
},
index.index() as u64,
desc.mutability,
desc.ty,
"context_field_ptr_to_imported_globals",
)
}
};
let llvm_ptr_ty = type_to_llvm_ptr(intrinsics, wasmer_ty); let global_mutability = global_type.mutability;
let global_ptr = if let Some(local_global_index) = wasm_module.local_global_index(index)
let global_array_ptr = cache_builder {
.build_load(globals_array_ptr_ptr, "global_array_ptr") let offset = offsets.vmctx_vmglobal_definition(local_global_index);
.into_pointer_value(); let offset = intrinsics.i32_ty.const_int(offset.into(), false);
tbaa_label( unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") }
module, } else {
intrinsics, let offset = offsets.vmctx_vmglobal_import(index);
field_name, let offset = intrinsics.i32_ty.const_int(offset.into(), false);
global_array_ptr.as_instruction_value().unwrap(), let global_ptr_ptr =
None, unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
); let global_ptr_ptr = cache_builder
let const_index = intrinsics.i32_ty.const_int(index, false); .build_bitcast(
let global_ptr_ptr = unsafe { global_ptr_ptr,
cache_builder.build_in_bounds_gep( intrinsics.i32_ptr_ty.ptr_type(AddressSpace::Generic),
global_array_ptr, "",
&[const_index], )
"global_ptr_ptr", .into_pointer_value();
) cache_builder
.build_load(global_ptr_ptr, "")
.into_pointer_value()
}; };
let global_ptr = cache_builder let global_ptr = cache_builder
.build_load(global_ptr_ptr, "global_ptr") .build_bitcast(
global_ptr,
type_to_llvm_ptr(&intrinsics, global_value_type),
"",
)
.into_pointer_value(); .into_pointer_value();
tbaa_label(
module,
intrinsics,
"global_ptr",
global_ptr.as_instruction_value().unwrap(),
Some(index as u32),
);
let global_ptr_typed = match global_mutability {
cache_builder.build_pointer_cast(global_ptr, llvm_ptr_ty, "global_ptr_typed"); Mutability::Const => GlobalCache::Const {
value: cache_builder.build_load(global_ptr, ""),
let mutable = mutable == Mutability::Var; },
if mutable { Mutability::Var => GlobalCache::Mut {
GlobalCache::Mut { ptr_to_value: global_ptr,
ptr_to_value: global_ptr_typed, },
}
} else {
let value = cache_builder.build_load(global_ptr_typed, "global_value");
tbaa_label(
module,
intrinsics,
"global",
value.as_instruction_value().unwrap(),
Some(index as u32),
);
GlobalCache::Const { value }
} }
}) })
} }
@@ -1345,7 +1327,8 @@ pub fn func_type_to_llvm<'ctx>(
.params() .params()
.iter() .iter()
.map(|&ty| type_to_llvm(intrinsics, ty)); .map(|&ty| type_to_llvm(intrinsics, ty));
let param_types: Vec<_> = std::iter::once(intrinsics.ctx_ptr_ty.as_basic_type_enum()) let param_types: Vec<_> = std::iter::repeat(intrinsics.ctx_ptr_ty.as_basic_type_enum())
.take(2)
.chain(user_param_types) .chain(user_param_types)
.collect(); .collect();

View File

@@ -19,6 +19,11 @@ wasm-common = { path = "../wasm-common", version = "0.16.2", default-features =
rayon = "1.3.0" rayon = "1.3.0"
serde = { version = "1.0.106", features = ["derive"] } serde = { version = "1.0.106", features = ["derive"] }
more-asserts = "0.2.1" more-asserts = "0.2.1"
dynasm = "0.5"
dynasmrt = "0.5"
lazy_static = "1.4"
byteorder = "1.3"
smallvec = "1"
[badges] [badges]
maintenance = { status = "actively-developed" } maintenance = { status = "actively-developed" }

File diff suppressed because it is too large

View File

@@ -0,0 +1,155 @@
use std::collections::BTreeMap;
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct RegisterIndex(pub usize);
/// Information of an inline breakpoint.
///
/// TODO: Move this into runtime.
#[derive(Clone, Debug)]
pub struct InlineBreakpoint {
/// Size in bytes taken by this breakpoint's instruction sequence.
pub size: usize,
/// Type of the inline breakpoint.
pub ty: InlineBreakpointType,
}
/// The type of an inline breakpoint.
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
pub enum InlineBreakpointType {
/// A middleware invocation breakpoint.
Middleware,
}
/// A kind of wasm or constant value
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum WasmAbstractValue {
/// A wasm runtime value
Runtime,
/// A wasm constant value
Const(u64),
}
/// A container for the state of a running wasm instance.
#[derive(Clone, Debug)]
pub struct MachineState {
/// Stack values.
pub stack_values: Vec<MachineValue>,
/// Register values.
pub register_values: Vec<MachineValue>,
/// Previous frame.
pub prev_frame: BTreeMap<usize, MachineValue>,
/// Wasm stack.
pub wasm_stack: Vec<WasmAbstractValue>,
/// Private depth of the wasm stack.
pub wasm_stack_private_depth: usize,
/// Wasm instruction offset.
pub wasm_inst_offset: usize,
}
/// A diff of two `MachineState`s.
#[derive(Clone, Debug, Default)]
pub struct MachineStateDiff {
/// Last.
pub last: Option<usize>,
/// Stack push.
pub stack_push: Vec<MachineValue>,
/// Stack pop.
pub stack_pop: usize,
/// Register diff.
pub reg_diff: Vec<(RegisterIndex, MachineValue)>,
/// Previous frame diff.
pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>, // None for removal
/// Wasm stack push.
pub wasm_stack_push: Vec<WasmAbstractValue>,
/// Wasm stack pop.
pub wasm_stack_pop: usize,
/// Private depth of the wasm stack.
pub wasm_stack_private_depth: usize, // absolute value; not a diff.
/// Wasm instruction offset.
pub wasm_inst_offset: usize, // absolute value; not a diff.
}
/// A kind of machine value.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum MachineValue {
/// Undefined.
Undefined,
/// Vmctx.
Vmctx,
/// Vmctx Deref.
VmctxDeref(Vec<usize>),
/// Preserve Register.
PreserveRegister(RegisterIndex),
/// Copy Stack BP Relative.
CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
/// Explicit Shadow.
ExplicitShadow, // indicates that all values above this are above the shadow region
/// Wasm Stack.
WasmStack(usize),
/// Wasm Local.
WasmLocal(usize),
/// Two Halves.
TwoHalves(Box<(MachineValue, MachineValue)>), // 32-bit values. TODO: optimize: add another type for inner "half" value to avoid boxing?
}
/// A map of function states.
#[derive(Clone, Debug)]
pub struct FunctionStateMap {
/// Initial.
pub initial: MachineState,
/// Local Function Id.
pub local_function_id: usize,
/// Locals.
pub locals: Vec<WasmAbstractValue>,
/// Shadow size.
pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
/// Diffs.
pub diffs: Vec<MachineStateDiff>,
/// Wasm Function Header target offset.
pub wasm_function_header_target_offset: Option<SuspendOffset>,
/// Wasm offset to target offset
pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
/// Loop offsets.
pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
/// Call offsets.
pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
/// Trappable offsets.
pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
}
/// A kind of suspend offset.
#[derive(Clone, Copy, Debug)]
pub enum SuspendOffset {
/// A loop.
Loop(usize),
/// A call.
Call(usize),
/// A trappable.
Trappable(usize),
}
/// Info for an offset.
#[derive(Clone, Debug)]
pub struct OffsetInfo {
/// End offset.
pub end_offset: usize, // excluded bound
/// Diff Id.
pub diff_id: usize,
/// Activate offset.
pub activate_offset: usize,
}
/// A map of module state.
#[derive(Clone, Debug)]
pub struct ModuleStateMap {
/// Local functions.
pub local_functions: BTreeMap<usize, FunctionStateMap>,
/// Total size.
pub total_size: usize,
}
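A `MachineStateDiff` can be replayed onto a `MachineState` to reconstruct the state at a given point. A minimal sketch, assuming pops apply before pushes and that the `last` chain (each diff is a delta against a parent diff) has already been walked from `initial`:

pub fn apply_diff(state: &mut MachineState, diff: &MachineStateDiff) {
    for _ in 0..diff.stack_pop {
        state.stack_values.pop();
    }
    state.stack_values.extend(diff.stack_push.iter().cloned());
    for (RegisterIndex(i), v) in &diff.reg_diff {
        state.register_values[*i] = v.clone();
    }
    for (&slot, change) in &diff.prev_frame_diff {
        match change {
            Some(v) => {
                state.prev_frame.insert(slot, v.clone());
            }
            None => {
                // `None` means removal, per the field's comment above.
                state.prev_frame.remove(&slot);
            }
        }
    }
    for _ in 0..diff.wasm_stack_pop {
        state.wasm_stack.pop();
    }
    state.wasm_stack.extend(diff.wasm_stack_push.iter().copied());
    // These two are absolute values, not diffs.
    state.wasm_stack_private_depth = diff.wasm_stack_private_depth;
    state.wasm_inst_offset = diff.wasm_inst_offset;
}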

View File

@@ -65,7 +65,7 @@ impl Compiler for SinglepassCompiler {
)) ))
} }
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
_signatures: &[FunctionType], _signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError> { ) -> Result<Vec<FunctionBody>, CompileError> {

View File

@@ -41,7 +41,7 @@ impl CompilerConfig for SinglepassConfig {
} }
/// Transform it into the compiler /// Transform it into the compiler
fn compiler(&self) -> Box<dyn Compiler> { fn compiler(&self) -> Box<dyn Compiler + Send> {
Box::new(SinglepassCompiler::new(&self)) Box::new(SinglepassCompiler::new(&self))
} }
} }

File diff suppressed because it is too large

View File

@@ -5,11 +5,20 @@
//! including Blockchains and Edge computing where quick compilation //! including Blockchains and Edge computing where quick compilation
//! times are a must, and JIT bombs should never happen. //! times are a must, and JIT bombs should never happen.
//! //!
//! Compared to Cranelift and LLVM, Singlepass is much faster to compile. //! Compared to Cranelift and LLVM, Singlepass compiles much faster but has worse
//! runtime performance.
//!
//! > Note: Singlepass currently depends on Rust nightly features. //! > Note: Singlepass currently depends on Rust nightly features.
#![feature(proc_macro_hygiene)]
mod compiler; mod compiler;
mod config; mod config;
//mod codegen_x64;
mod common_decl;
mod emitter_x64;
mod machine;
mod x64_decl;
pub use crate::compiler::SinglepassCompiler; pub use crate::compiler::SinglepassCompiler;
pub use crate::config::SinglepassConfig; pub use crate::config::SinglepassConfig;

View File

@@ -0,0 +1,513 @@
use crate::common_decl::*;
use crate::emitter_x64::*;
use crate::x64_decl::{new_machine_state, X64Register};
use smallvec::smallvec;
use smallvec::SmallVec;
use std::collections::HashSet;
use wasmparser::Type as WpType;
struct MachineStackOffset(usize);
pub struct Machine {
used_gprs: HashSet<GPR>,
used_xmms: HashSet<XMM>,
stack_offset: MachineStackOffset,
save_area_offset: Option<MachineStackOffset>,
pub state: MachineState,
pub(crate) track_state: bool,
}
impl Machine {
pub fn new() -> Self {
Machine {
used_gprs: HashSet::new(),
used_xmms: HashSet::new(),
stack_offset: MachineStackOffset(0),
save_area_offset: None,
state: new_machine_state(),
track_state: true,
}
}
pub fn get_stack_offset(&self) -> usize {
self.stack_offset.0
}
pub fn get_used_gprs(&self) -> Vec<GPR> {
self.used_gprs.iter().cloned().collect()
}
pub fn get_used_xmms(&self) -> Vec<XMM> {
self.used_xmms.iter().cloned().collect()
}
pub fn get_vmctx_reg() -> GPR {
GPR::R15
}
/// Picks an unused general purpose register for local/stack/argument use.
///
/// This method does not mark the register as used.
pub fn pick_gpr(&self) -> Option<GPR> {
use GPR::*;
static REGS: &'static [GPR] = &[RSI, RDI, R8, R9, R10, R11];
for r in REGS {
if !self.used_gprs.contains(r) {
return Some(*r);
}
}
None
}
/// Picks an unused general purpose register for internal temporary use.
///
/// This method does not mark the register as used.
pub fn pick_temp_gpr(&self) -> Option<GPR> {
use GPR::*;
static REGS: &'static [GPR] = &[RAX, RCX, RDX];
for r in REGS {
if !self.used_gprs.contains(r) {
return Some(*r);
}
}
None
}
/// Acquires a temporary GPR.
pub fn acquire_temp_gpr(&mut self) -> Option<GPR> {
let gpr = self.pick_temp_gpr();
if let Some(x) = gpr {
self.used_gprs.insert(x);
}
gpr
}
/// Releases a temporary GPR.
pub fn release_temp_gpr(&mut self, gpr: GPR) {
assert!(self.used_gprs.remove(&gpr));
}
/// Specify that a given register is in use.
pub fn reserve_unused_temp_gpr(&mut self, gpr: GPR) -> GPR {
assert!(!self.used_gprs.contains(&gpr));
self.used_gprs.insert(gpr);
gpr
}
/// Picks an unused XMM register.
///
/// This method does not mark the register as used.
pub fn pick_xmm(&self) -> Option<XMM> {
use XMM::*;
static REGS: &'static [XMM] = &[XMM3, XMM4, XMM5, XMM6, XMM7];
for r in REGS {
if !self.used_xmms.contains(r) {
return Some(*r);
}
}
None
}
/// Picks an unused XMM register for internal temporary use.
///
/// This method does not mark the register as used.
pub fn pick_temp_xmm(&self) -> Option<XMM> {
use XMM::*;
static REGS: &'static [XMM] = &[XMM0, XMM1, XMM2];
for r in REGS {
if !self.used_xmms.contains(r) {
return Some(*r);
}
}
None
}
/// Acquires a temporary XMM register.
pub fn acquire_temp_xmm(&mut self) -> Option<XMM> {
let xmm = self.pick_temp_xmm();
if let Some(x) = xmm {
self.used_xmms.insert(x);
}
xmm
}
/// Releases a temporary XMM register.
pub fn release_temp_xmm(&mut self, xmm: XMM) {
assert_eq!(self.used_xmms.remove(&xmm), true);
}
/// Acquires locations from the machine state.
///
/// If the returned locations are used for stack values, `release_locations` needs to be called on them;
/// otherwise, if the returned locations are used for locals, `release_locations` does not need to be called on them.
pub fn acquire_locations<E: Emitter>(
&mut self,
assembler: &mut E,
tys: &[(WpType, MachineValue)],
zeroed: bool,
) -> SmallVec<[Location; 1]> {
let mut ret = smallvec![];
let mut delta_stack_offset: usize = 0;
for (ty, mv) in tys {
let loc = match *ty {
WpType::F32 | WpType::F64 => self.pick_xmm().map(Location::XMM),
WpType::I32 | WpType::I64 => self.pick_gpr().map(Location::GPR),
_ => unreachable!(),
};
let loc = if let Some(x) = loc {
x
} else {
self.stack_offset.0 += 8;
delta_stack_offset += 8;
Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
};
if let Location::GPR(x) = loc {
self.used_gprs.insert(x);
self.state.register_values[X64Register::GPR(x).to_index().0] = mv.clone();
} else if let Location::XMM(x) = loc {
self.used_xmms.insert(x);
self.state.register_values[X64Register::XMM(x).to_index().0] = mv.clone();
} else {
self.state.stack_values.push(mv.clone());
}
self.state.wasm_stack.push(WasmAbstractValue::Runtime);
ret.push(loc);
}
if delta_stack_offset != 0 {
assembler.emit_sub(
Size::S64,
Location::Imm32(delta_stack_offset as u32),
Location::GPR(GPR::RSP),
);
}
if zeroed {
for i in 0..tys.len() {
assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
}
}
ret
}
/// Releases locations used for stack values.
pub fn release_locations<E: Emitter>(&mut self, assembler: &mut E, locs: &[Location]) {
let mut delta_stack_offset: usize = 0;
for loc in locs.iter().rev() {
match *loc {
Location::GPR(ref x) => {
assert_eq!(self.used_gprs.remove(x), true);
self.state.register_values[X64Register::GPR(*x).to_index().0] =
MachineValue::Undefined;
}
Location::XMM(ref x) => {
assert_eq!(self.used_xmms.remove(x), true);
self.state.register_values[X64Register::XMM(*x).to_index().0] =
MachineValue::Undefined;
}
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != self.stack_offset.0 {
unreachable!();
}
self.stack_offset.0 -= 8;
delta_stack_offset += 8;
self.state.stack_values.pop().unwrap();
}
_ => {}
}
self.state.wasm_stack.pop().unwrap();
}
if delta_stack_offset != 0 {
assembler.emit_add(
Size::S64,
Location::Imm32(delta_stack_offset as u32),
Location::GPR(GPR::RSP),
);
}
}
pub fn release_locations_only_regs(&mut self, locs: &[Location]) {
for loc in locs.iter().rev() {
match *loc {
Location::GPR(ref x) => {
assert_eq!(self.used_gprs.remove(x), true);
self.state.register_values[X64Register::GPR(*x).to_index().0] =
MachineValue::Undefined;
}
Location::XMM(ref x) => {
assert_eq!(self.used_xmms.remove(x), true);
self.state.register_values[X64Register::XMM(*x).to_index().0] =
MachineValue::Undefined;
}
_ => {}
}
// Wasm state popping is deferred to `release_locations_only_osr_state`.
}
}
pub fn release_locations_only_stack<E: Emitter>(
&mut self,
assembler: &mut E,
locs: &[Location],
) {
let mut delta_stack_offset: usize = 0;
for loc in locs.iter().rev() {
match *loc {
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != self.stack_offset.0 {
unreachable!();
}
self.stack_offset.0 -= 8;
delta_stack_offset += 8;
self.state.stack_values.pop().unwrap();
}
_ => {}
}
// Wasm state popping is deferred to `release_locations_only_osr_state`.
}
if delta_stack_offset != 0 {
assembler.emit_add(
Size::S64,
Location::Imm32(delta_stack_offset as u32),
Location::GPR(GPR::RSP),
);
}
}
pub fn release_locations_only_osr_state(&mut self, n: usize) {
for _ in 0..n {
self.state.wasm_stack.pop().unwrap();
}
}
pub fn release_locations_keep_state<E: Emitter>(&self, assembler: &mut E, locs: &[Location]) {
let mut delta_stack_offset: usize = 0;
let mut stack_offset = self.stack_offset.0;
for loc in locs.iter().rev() {
match *loc {
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != stack_offset {
unreachable!();
}
stack_offset -= 8;
delta_stack_offset += 8;
}
_ => {}
}
}
if delta_stack_offset != 0 {
assembler.emit_add(
Size::S64,
Location::Imm32(delta_stack_offset as u32),
Location::GPR(GPR::RSP),
);
}
}
pub fn init_locals<E: Emitter>(
&mut self,
a: &mut E,
n: usize,
n_params: usize,
) -> Vec<Location> {
// Use callee-saved registers for locals.
fn get_local_location(idx: usize) -> Location {
match idx {
0 => Location::GPR(GPR::R12),
1 => Location::GPR(GPR::R13),
2 => Location::GPR(GPR::R14),
3 => Location::GPR(GPR::RBX),
_ => Location::Memory(GPR::RBP, -(((idx - 3) * 8) as i32)),
}
}
let mut locations: Vec<Location> = vec![];
let mut allocated: usize = 0;
// Determine locations for parameters.
for i in 0..n_params {
let loc = Self::get_param_location(i + 1);
locations.push(match loc {
Location::GPR(_) => {
let old_idx = allocated;
allocated += 1;
get_local_location(old_idx)
}
Location::Memory(_, _) => {
let old_idx = allocated;
allocated += 1;
get_local_location(old_idx)
}
_ => unreachable!(),
});
}
// Determine locations for normal locals.
for _ in n_params..n {
locations.push(get_local_location(allocated));
allocated += 1;
}
for (i, loc) in locations.iter().enumerate() {
match *loc {
Location::GPR(x) => {
self.state.register_values[X64Register::GPR(x).to_index().0] =
MachineValue::WasmLocal(i);
}
Location::Memory(_, _) => {
self.state.stack_values.push(MachineValue::WasmLocal(i));
}
_ => unreachable!(),
}
}
// How many machine stack slots did all the locals use?
let num_mem_slots = locations
.iter()
.filter(|&&loc| match loc {
Location::Memory(_, _) => true,
_ => false,
})
.count();
// Move RSP down to reserve space for machine stack slots.
if num_mem_slots > 0 {
a.emit_sub(
Size::S64,
Location::Imm32((num_mem_slots * 8) as u32),
Location::GPR(GPR::RSP),
);
self.stack_offset.0 += num_mem_slots * 8;
}
// Save callee-saved registers.
for loc in locations.iter() {
if let Location::GPR(x) = *loc {
a.emit_push(Size::S64, *loc);
self.stack_offset.0 += 8;
self.state.stack_values.push(MachineValue::PreserveRegister(
X64Register::GPR(x).to_index(),
));
}
}
// Save R15 for vmctx use.
a.emit_push(Size::S64, Location::GPR(GPR::R15));
self.stack_offset.0 += 8;
self.state.stack_values.push(MachineValue::PreserveRegister(
X64Register::GPR(GPR::R15).to_index(),
));
// Save the offset of static area.
self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));
// Load in-register parameters into the allocated locations.
for i in 0..n_params {
let loc = Self::get_param_location(i + 1);
match loc {
Location::GPR(_) => {
a.emit_mov(Size::S64, loc, locations[i]);
}
Location::Memory(_, _) => match locations[i] {
Location::GPR(_) => {
a.emit_mov(Size::S64, loc, locations[i]);
}
Location::Memory(_, _) => {
a.emit_mov(Size::S64, loc, Location::GPR(GPR::RAX));
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), locations[i]);
}
_ => unreachable!(),
},
_ => unreachable!(),
}
}
// Load vmctx.
a.emit_mov(
Size::S64,
Self::get_param_location(0),
Location::GPR(GPR::R15),
);
// Initialize all normal locals to zero.
for i in n_params..n {
a.emit_mov(Size::S64, Location::Imm32(0), locations[i]);
}
locations
}
pub fn finalize_locals<E: Emitter>(&mut self, a: &mut E, locations: &[Location]) {
// Unwind stack to the "save area".
a.emit_lea(
Size::S64,
Location::Memory(
GPR::RBP,
-(self.save_area_offset.as_ref().unwrap().0 as i32),
),
Location::GPR(GPR::RSP),
);
// Restore R15 used by vmctx.
a.emit_pop(Size::S64, Location::GPR(GPR::R15));
// Restore callee-saved registers.
for loc in locations.iter().rev() {
if let Location::GPR(_) = *loc {
a.emit_pop(Size::S64, *loc);
}
}
}
pub fn get_param_location(idx: usize) -> Location {
match idx {
0 => Location::GPR(GPR::RDI),
1 => Location::GPR(GPR::RSI),
2 => Location::GPR(GPR::RDX),
3 => Location::GPR(GPR::RCX),
4 => Location::GPR(GPR::R8),
5 => Location::GPR(GPR::R9),
_ => Location::Memory(GPR::RBP, (16 + (idx - 6) * 8) as i32),
}
}
}
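A minimal usage sketch of the acquire/release contract described above, mirroring the test below (it assumes, as the test does, that dynasmrt's x64 Assembler implements `Emitter`):

#[allow(dead_code)]
fn acquire_release_demo() {
    use dynasmrt::x64::Assembler;
    let mut machine = Machine::new();
    let mut assembler = Assembler::new().unwrap();
    let locs = machine.acquire_locations(
        &mut assembler,
        &[
            (WpType::I32, MachineValue::Undefined), // lands in a GPR
            (WpType::F64, MachineValue::Undefined), // lands in an XMM register
        ],
        false,
    );
    // ... emit code that uses `locs` ...
    machine.release_locations(&mut assembler, &locs);
    assert_eq!(machine.get_stack_offset(), 0); // nothing was spilled to the stack
}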
#[cfg(test)]
mod test {
use super::*;
use dynasmrt::x64::Assembler;
#[test]
fn test_release_locations_keep_state_nopanic() {
let mut machine = Machine::new();
let mut assembler = Assembler::new().unwrap();
let locs = machine.acquire_locations(
&mut assembler,
&(0..10)
.map(|_| (WpType::I32, MachineValue::Undefined))
.collect::<Vec<_>>(),
false,
);
machine.release_locations_keep_state(&mut assembler, &locs);
}
}

View File

@@ -0,0 +1,222 @@
//! X64 structures.
use crate::common_decl::{MachineState, MachineValue, RegisterIndex};
use std::collections::BTreeMap;
use wasm_common::Type;
/// General-purpose registers.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum GPR {
/// RAX register
RAX,
/// RCX register
RCX,
/// RDX register
RDX,
/// RBX register
RBX,
/// RSP register
RSP,
/// RBP register
RBP,
/// RSI register
RSI,
/// RDI register
RDI,
/// R8 register
R8,
/// R9 register
R9,
/// R10 register
R10,
/// R11 register
R11,
/// R12 register
R12,
/// R13 register
R13,
/// R14 register
R14,
/// R15 register
R15,
}
/// XMM registers.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum XMM {
/// XMM register 0
XMM0,
/// XMM register 1
XMM1,
/// XMM register 2
XMM2,
/// XMM register 3
XMM3,
/// XMM register 4
XMM4,
/// XMM register 5
XMM5,
/// XMM register 6
XMM6,
/// XMM register 7
XMM7,
/// XMM register 8
XMM8,
/// XMM register 9
XMM9,
/// XMM register 10
XMM10,
/// XMM register 11
XMM11,
/// XMM register 12
XMM12,
/// XMM register 13
XMM13,
/// XMM register 14
XMM14,
/// XMM register 15
XMM15,
}
/// A machine register under the x86-64 architecture.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum X64Register {
/// General-purpose registers.
GPR(GPR),
/// XMM (floating point/SIMD) registers.
XMM(XMM),
}
impl X64Register {
/// Returns the index of the register.
pub fn to_index(&self) -> RegisterIndex {
match *self {
X64Register::GPR(x) => RegisterIndex(x as usize),
X64Register::XMM(x) => RegisterIndex(x as usize + 16),
}
}
/// Converts a DWARF regnum to an X64Register.
pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
Some(match x {
0 => X64Register::GPR(GPR::RAX),
1 => X64Register::GPR(GPR::RDX),
2 => X64Register::GPR(GPR::RCX),
3 => X64Register::GPR(GPR::RBX),
4 => X64Register::GPR(GPR::RSI),
5 => X64Register::GPR(GPR::RDI),
6 => X64Register::GPR(GPR::RBP),
7 => X64Register::GPR(GPR::RSP),
8 => X64Register::GPR(GPR::R8),
9 => X64Register::GPR(GPR::R9),
10 => X64Register::GPR(GPR::R10),
11 => X64Register::GPR(GPR::R11),
12 => X64Register::GPR(GPR::R12),
13 => X64Register::GPR(GPR::R13),
14 => X64Register::GPR(GPR::R14),
15 => X64Register::GPR(GPR::R15),
17 => X64Register::XMM(XMM::XMM0),
18 => X64Register::XMM(XMM::XMM1),
19 => X64Register::XMM(XMM::XMM2),
20 => X64Register::XMM(XMM::XMM3),
21 => X64Register::XMM(XMM::XMM4),
22 => X64Register::XMM(XMM::XMM5),
23 => X64Register::XMM(XMM::XMM6),
24 => X64Register::XMM(XMM::XMM7),
_ => return None,
})
}
/// Returns the instruction prefix for `movq %this_reg, ?(%rsp)`.
///
/// To build an instruction, append the memory location as a 32-bit
/// offset to the stack pointer to this prefix.
pub fn prefix_mov_to_stack(&self) -> Option<&'static [u8]> {
Some(match *self {
X64Register::GPR(gpr) => match gpr {
GPR::RDI => &[0x48, 0x89, 0xbc, 0x24],
GPR::RSI => &[0x48, 0x89, 0xb4, 0x24],
GPR::RDX => &[0x48, 0x89, 0x94, 0x24],
GPR::RCX => &[0x48, 0x89, 0x8c, 0x24],
GPR::R8 => &[0x4c, 0x89, 0x84, 0x24],
GPR::R9 => &[0x4c, 0x89, 0x8c, 0x24],
_ => return None,
},
X64Register::XMM(xmm) => match xmm {
XMM::XMM0 => &[0x66, 0x0f, 0xd6, 0x84, 0x24],
XMM::XMM1 => &[0x66, 0x0f, 0xd6, 0x8c, 0x24],
XMM::XMM2 => &[0x66, 0x0f, 0xd6, 0x94, 0x24],
XMM::XMM3 => &[0x66, 0x0f, 0xd6, 0x9c, 0x24],
XMM::XMM4 => &[0x66, 0x0f, 0xd6, 0xa4, 0x24],
XMM::XMM5 => &[0x66, 0x0f, 0xd6, 0xac, 0x24],
XMM::XMM6 => &[0x66, 0x0f, 0xd6, 0xb4, 0x24],
XMM::XMM7 => &[0x66, 0x0f, 0xd6, 0xbc, 0x24],
_ => return None,
},
})
}
}
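A sketch of materializing `movq %rdi, 0x20(%rsp)` from the prefix above by appending the 32-bit little-endian displacement, as the doc comment describes:

#[allow(dead_code)]
fn mov_to_stack_demo() -> Vec<u8> {
    let mut inst = X64Register::GPR(GPR::RDI)
        .prefix_mov_to_stack()
        .expect("RDI has an encodable prefix")
        .to_vec();
    inst.extend_from_slice(&0x20u32.to_le_bytes());
    // inst is now [0x48, 0x89, 0xbc, 0x24, 0x20, 0x00, 0x00, 0x00]
    inst
}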
/// An allocator that allocates registers for function arguments according to the System V ABI.
#[derive(Default)]
pub struct ArgumentRegisterAllocator {
n_gprs: usize,
n_xmms: usize,
}
impl ArgumentRegisterAllocator {
/// Allocates a register for argument type `ty`. Returns `None` if no register is available for this type.
pub fn next(&mut self, ty: Type) -> Option<X64Register> {
static GPR_SEQ: &'static [GPR] =
&[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
static XMM_SEQ: &'static [XMM] = &[
XMM::XMM0,
XMM::XMM1,
XMM::XMM2,
XMM::XMM3,
XMM::XMM4,
XMM::XMM5,
XMM::XMM6,
XMM::XMM7,
];
match ty {
Type::I32 | Type::I64 => {
if self.n_gprs < GPR_SEQ.len() {
let gpr = GPR_SEQ[self.n_gprs];
self.n_gprs += 1;
Some(X64Register::GPR(gpr))
} else {
None
}
}
Type::F32 | Type::F64 => {
if self.n_xmms < XMM_SEQ.len() {
let xmm = XMM_SEQ[self.n_xmms];
self.n_xmms += 1;
Some(X64Register::XMM(xmm))
} else {
None
}
}
_ => todo!(
"ArgumentRegisterAllocator::next: Unsupported type: {:?}",
ty
),
}
}
}
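Usage sketch: System V argument assignment for the parameter list (i32, f64, i64). Integer and floating-point arguments draw from separate register sequences, so the f64 does not consume a GPR slot:

#[allow(dead_code)]
fn sysv_args_demo() {
    let mut alloc = ArgumentRegisterAllocator::default();
    assert_eq!(alloc.next(Type::I32), Some(X64Register::GPR(GPR::RDI)));
    assert_eq!(alloc.next(Type::F64), Some(X64Register::XMM(XMM::XMM0)));
    assert_eq!(alloc.next(Type::I64), Some(X64Register::GPR(GPR::RSI)));
}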
/// Create a new `MachineState` with default values.
pub fn new_machine_state() -> MachineState {
MachineState {
stack_values: vec![],
register_values: vec![MachineValue::Undefined; 16 + 8],
prev_frame: BTreeMap::new(),
wasm_stack: vec![],
wasm_stack_private_depth: 0,
wasm_inst_offset: std::usize::MAX,
}
}

View File

@@ -9,7 +9,9 @@ use crate::target::Target;
use crate::FunctionBodyData; use crate::FunctionBodyData;
use crate::ModuleTranslationState; use crate::ModuleTranslationState;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{Features, FunctionType, LocalFunctionIndex, MemoryIndex, TableIndex}; use wasm_common::{
Features, FunctionIndex, FunctionType, LocalFunctionIndex, MemoryIndex, TableIndex,
};
use wasmer_runtime::Module; use wasmer_runtime::Module;
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
use wasmparser::{validate, OperatorValidatorConfig, ValidatingParserConfig}; use wasmparser::{validate, OperatorValidatorConfig, ValidatingParserConfig};
@@ -80,8 +82,30 @@ pub trait Compiler {
/// let func = instance.exports.func("my_func"); /// let func = instance.exports.func("my_func");
/// func.call(&[Value::I32(1)]); /// func.call(&[Value::I32(1)]);
/// ``` /// ```
fn compile_wasm_trampolines( fn compile_function_call_trampolines(
&self, &self,
signatures: &[FunctionType], signatures: &[FunctionType],
) -> Result<Vec<FunctionBody>, CompileError>; ) -> Result<Vec<FunctionBody>, CompileError>;
/// Compile the trampolines to call a dynamic function defined in
/// the host, from a Wasm module.
///
/// This allows us to create dynamic Wasm functions, such as:
///
/// ```ignore
/// fn my_func(values: Vec<Val>) -> Vec<Val> {
/// // do something
/// }
///
/// let my_func_type = FuncType::new(vec![Type::I32], vec![Type::I32]);
/// let imports = imports!{
/// "namespace" => {
/// "my_func" => Func::new_dynamic(my_func_type, my_func),s
/// }
/// }
/// ```
fn compile_dynamic_function_trampolines(
&self,
module: &Module,
) -> Result<PrimaryMap<FunctionIndex, FunctionBody>, CompileError>;
} }

View File

@@ -99,7 +99,7 @@ impl Compilation {
self.functions.is_empty() self.functions.is_empty()
} }
/// Gets functions jump table offsets. /// Gets functions relocations.
pub fn get_relocations(&self) -> PrimaryMap<LocalFunctionIndex, Vec<Relocation>> { pub fn get_relocations(&self) -> PrimaryMap<LocalFunctionIndex, Vec<Relocation>> {
self.functions self.functions
.iter() .iter()
@@ -107,7 +107,7 @@ impl Compilation {
.collect::<PrimaryMap<LocalFunctionIndex, _>>() .collect::<PrimaryMap<LocalFunctionIndex, _>>()
} }
/// Gets functions jump table offsets. /// Gets functions bodies.
pub fn get_function_bodies(&self) -> PrimaryMap<LocalFunctionIndex, FunctionBody> { pub fn get_function_bodies(&self) -> PrimaryMap<LocalFunctionIndex, FunctionBody> {
self.functions self.functions
.iter() .iter()
@@ -123,7 +123,7 @@ impl Compilation {
.collect::<PrimaryMap<LocalFunctionIndex, _>>() .collect::<PrimaryMap<LocalFunctionIndex, _>>()
} }
/// Gets functions jump table offsets. /// Gets functions frame info.
pub fn get_frame_info(&self) -> PrimaryMap<LocalFunctionIndex, CompiledFunctionFrameInfo> { pub fn get_frame_info(&self) -> PrimaryMap<LocalFunctionIndex, CompiledFunctionFrameInfo> {
self.functions self.functions
.iter() .iter()
@@ -138,6 +138,14 @@ impl Compilation {
.map(|(_, section)| section.bytes.clone()) .map(|(_, section)| section.bytes.clone())
.collect::<PrimaryMap<SectionIndex, _>>() .collect::<PrimaryMap<SectionIndex, _>>()
} }
/// Gets relocations that apply to custom sections.
pub fn get_custom_section_relocations(&self) -> PrimaryMap<SectionIndex, Vec<Relocation>> {
self.custom_sections
.iter()
.map(|(_, section)| section.relocations.clone())
.collect::<PrimaryMap<SectionIndex, _>>()
}
} }
impl<'a> IntoIterator for &'a Compilation { impl<'a> IntoIterator for &'a Compilation {

View File

@@ -78,6 +78,7 @@ pub use crate::unwind::{CompiledFunctionUnwindInfo, FDERelocEntry, FunctionTable
pub use wasm_common::Features; pub use wasm_common::Features;
#[cfg(feature = "translator")]
/// wasmparser is exported as a module to slim compiler dependencies /// wasmparser is exported as a module to slim compiler dependencies
pub mod wasmparser { pub mod wasmparser {
pub use wasmparser::*; pub use wasmparser::*;

View File

@@ -15,7 +15,7 @@ use crate::{Addend, CodeOffset, JumpTable};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{FunctionIndex, LocalFunctionIndex}; use wasm_common::LocalFunctionIndex;
use wasmer_runtime::libcalls::LibCall; use wasmer_runtime::libcalls::LibCall;
/// Relocation kinds for every ISA. /// Relocation kinds for every ISA.
@@ -105,15 +105,15 @@ impl Relocation {
/// The function returns the relocation address and the delta. /// The function returns the relocation address and the delta.
pub fn for_address(&self, start: usize, target_func_address: u64) -> (usize, u64) { pub fn for_address(&self, start: usize, target_func_address: u64) -> (usize, u64) {
match self.kind { match self.kind {
RelocationKind::Abs8 => unsafe { RelocationKind::Abs8 => {
let reloc_address = start + self.offset as usize; let reloc_address = start + self.offset as usize;
let reloc_addend = self.addend as isize; let reloc_addend = self.addend as isize;
let reloc_abs = (target_func_address) let reloc_abs = (target_func_address)
.checked_add(reloc_addend as u64) .checked_add(reloc_addend as u64)
.unwrap(); .unwrap();
(reloc_address, reloc_abs) (reloc_address, reloc_abs)
}, }
RelocationKind::X86PCRel4 => unsafe { RelocationKind::X86PCRel4 => {
let reloc_address = start + self.offset as usize; let reloc_address = start + self.offset as usize;
let reloc_addend = self.addend as isize; let reloc_addend = self.addend as isize;
let reloc_delta_u32 = (target_func_address as u32) let reloc_delta_u32 = (target_func_address as u32)

View File

@@ -6,6 +6,7 @@
//! it can be patched later by the engine (native or JIT). //! it can be patched later by the engine (native or JIT).
use crate::std::vec::Vec; use crate::std::vec::Vec;
use crate::Relocation;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use wasm_common::entity::entity_impl; use wasm_common::entity::entity_impl;
@@ -23,8 +24,7 @@ pub enum CustomSectionProtection {
Read, Read,
// We don't include `ReadWrite` here because it would complicate freeze // We don't include `ReadWrite` here because it would complicate freeze
// and resumption of executing Modules. // and resumption of executing Modules.
// We also currently don't include `ReadExecute` as we don't have a way // TODO: add `ReadExecute`.
// to represent relocations for this kind of section.
} }
/// A Section for a `Compilation`. /// A Section for a `Compilation`.
@@ -33,8 +33,9 @@ pub enum CustomSectionProtection {
/// in the emitted module. /// in the emitted module.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct CustomSection { pub struct CustomSection {
/// The protection /// Memory protection that applies to this section.
pub protection: CustomSectionProtection, pub protection: CustomSectionProtection,
/// The bytes corresponding to this section. /// The bytes corresponding to this section.
/// ///
/// > Note: These bytes have to be at-least 8-byte aligned /// > Note: These bytes have to be at-least 8-byte aligned
@@ -42,6 +43,9 @@ pub struct CustomSection {
/// > We might need to create another field for alignment in case it's /// > We might need to create another field for alignment in case it's
/// > needed in the future. /// > needed in the future.
pub bytes: SectionBody, pub bytes: SectionBody,
/// Relocations that apply to this custom section.
pub relocations: Vec<Relocation>,
} }
/// The bytes in the section. /// The bytes in the section.
@@ -49,14 +53,9 @@ pub struct CustomSection {
pub struct SectionBody(#[serde(with = "serde_bytes")] Vec<u8>); pub struct SectionBody(#[serde(with = "serde_bytes")] Vec<u8>);
impl SectionBody { impl SectionBody {
/// Extend the section with the bytes given. /// Create a new section body with the given contents.
pub fn extend(&mut self, contents: &[u8]) { pub fn new_with_vec(contents: Vec<u8>) -> Self {
self.0.extend(contents); Self(contents)
}
/// Extends the section by appending bytes from another section.
pub fn append(&mut self, body: &SectionBody) {
self.0.extend(&body.0);
} }
/// Returns a raw pointer to the section's buffer. /// Returns a raw pointer to the section's buffer.
@@ -68,4 +67,9 @@ impl SectionBody {
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.0.len() self.0.len()
} }
/// Returns whether or not the section body is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
} }
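A small sketch of building a read-only custom section with the new constructor, using the fields defined above:

#[allow(dead_code)]
fn custom_section_demo() -> CustomSection {
    CustomSection {
        protection: CustomSectionProtection::Read,
        // The note above requires the bytes to be at least 8-byte aligned.
        bytes: SectionBody::new_with_vec(vec![0u8; 16]),
        relocations: vec![], // filled in by the compiler when needed
    }
}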

View File

@@ -2,23 +2,22 @@
use enumset::{EnumSet, EnumSetType}; use enumset::{EnumSet, EnumSetType};
pub use target_lexicon::{Architecture, CallingConvention, OperatingSystem, Triple}; pub use target_lexicon::{Architecture, CallingConvention, OperatingSystem, Triple};
use crate::std::boxed::Box;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use raw_cpuid::CpuId; use raw_cpuid::CpuId;
/// The nomenclature is inspired by the [raw-cpuid crate]. /// The nomenclature is inspired by the [`cpuid` crate].
/// The list of supported features was initially retrieved from /// The list of supported features was initially retrieved from
/// [cranelift-native]. /// [`cranelift-native`].
/// ///
/// The `CpuFeature` enum vaues are likely to grow closer to the /// The `CpuFeature` enum values are likely to grow closer to the
/// original cpuid. However, we prefer to start small and grow from there. /// original `cpuid`. However, we prefer to start small and grow from there.
/// ///
/// If you would like to use a flag that doesn't exist yet here, please /// If you would like to use a flag that doesn't exist yet here, please
/// open a PR. /// open a PR.
/// ///
/// [cpuid crate]: https://docs.rs/cpuid/0.1.1/cpuid/enum.CpuFeature.html /// [`cpuid` crate]: https://docs.rs/cpuid/0.1.1/cpuid/enum.CpuFeature.html
/// [cranelift-native]: https://github.com/bytecodealliance/cranelift/blob/6988545fd20249b084c53f4761b8c861266f5d31/cranelift-native/src/lib.rs#L51-L92 /// [`cranelift-native`]: https://github.com/bytecodealliance/cranelift/blob/6988545fd20249b084c53f4761b8c861266f5d31/cranelift-native/src/lib.rs#L51-L92
#[allow(missing_docs)] #[allow(missing_docs, clippy::derive_hash_xor_eq)]
#[derive(EnumSetType, Debug, Hash)] #[derive(EnumSetType, Debug, Hash)]
pub enum CpuFeature { pub enum CpuFeature {
// X86 features // X86 features
@@ -48,47 +47,47 @@ impl CpuFeature {
if let Some(info) = cpuid.get_feature_info() { if let Some(info) = cpuid.get_feature_info() {
if info.has_sse2() { if info.has_sse2() {
features.insert(CpuFeature::SSE2); features.insert(Self::SSE2);
} }
if info.has_sse3() { if info.has_sse3() {
features.insert(CpuFeature::SSE3); features.insert(Self::SSE3);
} }
if info.has_ssse3() { if info.has_ssse3() {
features.insert(CpuFeature::SSSE3); features.insert(Self::SSSE3);
} }
if info.has_sse41() { if info.has_sse41() {
features.insert(CpuFeature::SSE41); features.insert(Self::SSE41);
} }
if info.has_sse42() { if info.has_sse42() {
features.insert(CpuFeature::SSE42); features.insert(Self::SSE42);
} }
if info.has_popcnt() { if info.has_popcnt() {
features.insert(CpuFeature::POPCNT); features.insert(Self::POPCNT);
} }
if info.has_avx() { if info.has_avx() {
features.insert(CpuFeature::AVX); features.insert(Self::AVX);
} }
} }
if let Some(info) = cpuid.get_extended_feature_info() { if let Some(info) = cpuid.get_extended_feature_info() {
if info.has_bmi1() { if info.has_bmi1() {
features.insert(CpuFeature::BMI1); features.insert(Self::BMI1);
} }
if info.has_bmi2() { if info.has_bmi2() {
features.insert(CpuFeature::BMI2); features.insert(Self::BMI2);
} }
if info.has_avx2() { if info.has_avx2() {
features.insert(CpuFeature::AVX2); features.insert(Self::AVX2);
} }
if info.has_avx512dq() { if info.has_avx512dq() {
features.insert(CpuFeature::AVX512DQ); features.insert(Self::AVX512DQ);
} }
if info.has_avx512vl() { if info.has_avx512vl() {
features.insert(CpuFeature::AVX512VL); features.insert(Self::AVX512VL);
} }
} }
if let Some(info) = cpuid.get_extended_function_info() { if let Some(info) = cpuid.get_extended_function_info() {
if info.has_lzcnt() { if info.has_lzcnt() {
features.insert(CpuFeature::LZCNT); features.insert(Self::LZCNT);
} }
} }
features features
@@ -111,8 +110,8 @@ pub struct Target {
impl Target { impl Target {
/// Creates a new target given a triple /// Creates a new target given a triple
pub fn new(triple: Triple, cpu_features: EnumSet<CpuFeature>) -> Target { pub fn new(triple: Triple, cpu_features: EnumSet<CpuFeature>) -> Self {
Target { Self {
triple, triple,
cpu_features, cpu_features,
} }
@@ -131,8 +130,8 @@ impl Target {
/// The default for the Target will use the HOST as the triple /// The default for the Target will use the HOST as the triple
impl Default for Target { impl Default for Target {
fn default() -> Target { fn default() -> Self {
Target { Self {
triple: Triple::host(), triple: Triple::host(),
cpu_features: CpuFeature::for_host(), cpu_features: CpuFeature::for_host(),
} }

View File

@@ -1,5 +1,4 @@
use crate::WasmError; use crate::WasmError;
use thiserror::Error;
use wasmparser::BinaryReaderError; use wasmparser::BinaryReaderError;
/// Return an `Err(WasmError::Unsupported(msg))` where `msg` the string built by calling `format!` /// Return an `Err(WasmError::Unsupported(msg))` where `msg` the string built by calling `format!`

View File

@@ -42,26 +42,26 @@ impl CompiledFunctionUnwindInfo {
/// Retuns true is no unwind info data. /// Retuns true is no unwind info data.
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
match self { match self {
CompiledFunctionUnwindInfo::Windows(d) => d.is_empty(), Self::Windows(d) => d.is_empty(),
CompiledFunctionUnwindInfo::FrameLayout(c, _, _) => c.is_empty(), Self::FrameLayout(c, _, _) => c.is_empty(),
} }
} }
/// Returns size of serilized unwind info. /// Returns size of serilized unwind info.
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
match self { match self {
CompiledFunctionUnwindInfo::Windows(d) => d.len(), Self::Windows(d) => d.len(),
CompiledFunctionUnwindInfo::FrameLayout(c, _, _) => c.len(), Self::FrameLayout(c, _, _) => c.len(),
} }
} }
/// Serializes data into byte array. /// Serializes data into byte array.
pub fn serialize(&self, dest: &mut [u8], relocs: &mut Vec<FunctionTableReloc>) { pub fn serialize(&self, dest: &mut [u8], relocs: &mut Vec<FunctionTableReloc>) {
match self { match self {
CompiledFunctionUnwindInfo::Windows(d) => { Self::Windows(d) => {
dest.copy_from_slice(d); dest.copy_from_slice(d);
} }
CompiledFunctionUnwindInfo::FrameLayout(code, _fde_offset, r) => { Self::FrameLayout(code, _fde_offset, r) => {
dest.copy_from_slice(code); dest.copy_from_slice(code);
r.iter().for_each(move |r| { r.iter().for_each(move |r| {
assert_eq!(r.2, 8); assert_eq!(r.2, 8);

View File

@@ -4,8 +4,8 @@ use crate::{CodeMemory, CompiledModule};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{FunctionType, LocalFunctionIndex, MemoryIndex, SignatureIndex, TableIndex}; use wasm_common::{FunctionIndex, FunctionType, LocalFunctionIndex, SignatureIndex};
use wasmer_compiler::{Compilation, CompileError, FunctionBody, Target}; use wasmer_compiler::{CompileError, FunctionBody};
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
use wasmer_compiler::{Compiler, CompilerConfig}; use wasmer_compiler::{Compiler, CompilerConfig};
use wasmer_engine::{ use wasmer_engine::{
@@ -13,8 +13,7 @@ use wasmer_engine::{
SerializeError, Tunables, SerializeError, Tunables,
}; };
use wasmer_runtime::{ use wasmer_runtime::{
InstanceHandle, MemoryPlan, Module, SignatureRegistry, TablePlan, VMFunctionBody, InstanceHandle, Module, SignatureRegistry, VMFunctionBody, VMSharedSignatureIndex, VMTrampoline,
VMSharedSignatureIndex, VMTrampoline,
}; };
/// A WebAssembly `JIT` Engine. /// A WebAssembly `JIT` Engine.
@@ -40,7 +39,7 @@ impl JITEngine {
Self { Self {
inner: Arc::new(Mutex::new(JITEngineInner { inner: Arc::new(Mutex::new(JITEngineInner {
compiler: Some(compiler), compiler: Some(compiler),
trampolines: HashMap::new(), function_call_trampolines: HashMap::new(),
code_memory: CodeMemory::new(), code_memory: CodeMemory::new(),
signatures: SignatureRegistry::new(), signatures: SignatureRegistry::new(),
})), })),
@@ -66,7 +65,7 @@ impl JITEngine {
inner: Arc::new(Mutex::new(JITEngineInner { inner: Arc::new(Mutex::new(JITEngineInner {
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
compiler: None, compiler: None,
trampolines: HashMap::new(), function_call_trampolines: HashMap::new(),
code_memory: CodeMemory::new(), code_memory: CodeMemory::new(),
signatures: SignatureRegistry::new(), signatures: SignatureRegistry::new(),
})), })),
@@ -108,8 +107,8 @@ impl Engine for JITEngine {
} }
/// Retrieves a trampoline given a signature /// Retrieves a trampoline given a signature
fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> { fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.compiler().trampoline(sig) self.compiler().function_call_trampoline(sig)
} }
/// Validates a WebAssembly module /// Validates a WebAssembly module
@@ -129,7 +128,7 @@ impl Engine for JITEngine {
resolver: &dyn Resolver, resolver: &dyn Resolver,
) -> Result<InstanceHandle, InstantiationError> { ) -> Result<InstanceHandle, InstantiationError> {
let compiled_module = compiled_module.downcast_ref::<CompiledModule>().unwrap(); let compiled_module = compiled_module.downcast_ref::<CompiledModule>().unwrap();
unsafe { compiled_module.instantiate(&self, resolver, Box::new(())) } compiled_module.instantiate(&self, resolver, Box::new(()))
} }
/// Finish the instantiation of a WebAssembly module /// Finish the instantiation of a WebAssembly module
@@ -139,7 +138,7 @@ impl Engine for JITEngine {
handle: &InstanceHandle, handle: &InstanceHandle,
) -> Result<(), InstantiationError> { ) -> Result<(), InstantiationError> {
let compiled_module = compiled_module.downcast_ref::<CompiledModule>().unwrap(); let compiled_module = compiled_module.downcast_ref::<CompiledModule>().unwrap();
unsafe { compiled_module.finish_instantiation(&handle) } compiled_module.finish_instantiation(&handle)
} }
/// Serializes a WebAssembly module /// Serializes a WebAssembly module
@@ -174,7 +173,7 @@ pub struct JITEngineInner {
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
compiler: Option<Box<dyn Compiler + Send>>, compiler: Option<Box<dyn Compiler + Send>>,
/// Pointers to trampoline functions used to enter particular signatures /// Pointers to trampoline functions used to enter particular signatures
trampolines: HashMap<VMSharedSignatureIndex, VMTrampoline>, function_call_trampolines: HashMap<VMSharedSignatureIndex, VMTrampoline>,
/// The code memory is responsible of publishing the compiled /// The code memory is responsible of publishing the compiled
/// functions to memory. /// functions to memory.
code_memory: CodeMemory, code_memory: CodeMemory,
@@ -208,12 +207,19 @@ impl JITEngineInner {
} }
/// Compile the given function bodies. /// Compile the given function bodies.
pub(crate) fn allocate<'data>( pub(crate) fn allocate(
&mut self, &mut self,
module: &Module, module: &Module,
functions: &PrimaryMap<LocalFunctionIndex, FunctionBody>, functions: &PrimaryMap<LocalFunctionIndex, FunctionBody>,
trampolines: &PrimaryMap<SignatureIndex, FunctionBody>, function_call_trampolines: &PrimaryMap<SignatureIndex, FunctionBody>,
) -> Result<PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>, CompileError> { dynamic_function_trampolines: &PrimaryMap<FunctionIndex, FunctionBody>,
) -> Result<
(
PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>,
PrimaryMap<FunctionIndex, *const VMFunctionBody>,
),
CompileError,
> {
// Allocate all of the compiled functions into executable memory, // Allocate all of the compiled functions into executable memory,
// copying over their contents. // copying over their contents.
let allocated_functions = let allocated_functions =
@@ -226,10 +232,10 @@ impl JITEngineInner {
)) ))
})?; })?;
for (sig_index, compiled_function) in trampolines.iter() { for (sig_index, compiled_function) in function_call_trampolines.iter() {
let func_type = module.signatures.get(sig_index).unwrap(); let func_type = module.signatures.get(sig_index).unwrap();
let index = self.signatures.register(&func_type); let index = self.signatures.register(&func_type);
if self.trampolines.contains_key(&index) { if self.function_call_trampolines.contains_key(&index) {
// We don't need to allocate the trampoline in case // We don't need to allocate the trampoline in case
// it's signature is already allocated. // it's signature is already allocated.
continue; continue;
@@ -239,16 +245,34 @@ impl JITEngineInner {
.allocate_for_function(&compiled_function) .allocate_for_function(&compiled_function)
.map_err(|message| { .map_err(|message| {
CompileError::Resource(format!( CompileError::Resource(format!(
"failed to allocate memory for trampolines: {}", "failed to allocate memory for function call trampolines: {}",
message message
)) ))
})? })?
.as_ptr(); .as_ptr();
let trampoline = let trampoline =
unsafe { std::mem::transmute::<*const VMFunctionBody, VMTrampoline>(ptr) }; unsafe { std::mem::transmute::<*const VMFunctionBody, VMTrampoline>(ptr) };
self.trampolines.insert(index, trampoline); self.function_call_trampolines.insert(index, trampoline);
} }
Ok(allocated_functions)
let allocated_dynamic_function_trampolines = dynamic_function_trampolines
.values()
.map(|compiled_function| {
let ptr = self
.code_memory
.allocate_for_function(&compiled_function)
.map_err(|message| {
CompileError::Resource(format!(
"failed to allocate memory for dynamic function trampolines: {}",
message
))
})?
.as_ptr();
Ok(ptr)
})
.collect::<Result<PrimaryMap<FunctionIndex, _>, CompileError>>()?;
Ok((allocated_functions, allocated_dynamic_function_trampolines))
} }
/// Make memory containing compiled code executable. /// Make memory containing compiled code executable.
@@ -262,7 +286,7 @@ impl JITEngineInner {
} }
/// Gets the trampoline pre-registered for a particular signature /// Gets the trampoline pre-registered for a particular signature
pub fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> { pub fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.trampolines.get(&sig).cloned() self.function_call_trampolines.get(&sig).cloned()
} }
} }

View File

@@ -27,6 +27,11 @@ impl FunctionTable {
self.functions.len() self.functions.len()
} }
/// Returns whether or not the function table is empty.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Adds a function to the table based off of the start offset, end offset, and unwind offset. /// Adds a function to the table based off of the start offset, end offset, and unwind offset.
/// ///
/// The offsets are from the "module base", which is provided when the table is published. /// The offsets are from the "module base", which is provided when the table is published.
@@ -131,6 +136,11 @@ impl FunctionTable {
self.functions.len() self.functions.len()
} }
/// Returns whether or not the function table is empty.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Adds a function to the table based off of the start offset, end offset, and unwind offset. /// Adds a function to the table based off of the start offset, end offset, and unwind offset.
/// ///
/// The offsets are from the "module base", which is provided when the table is published. /// The offsets are from the "module base", which is provided when the table is published.

View File

@@ -4,71 +4,80 @@ use std::ptr::write_unaligned;
use wasm_common::entity::{EntityRef, PrimaryMap}; use wasm_common::entity::{EntityRef, PrimaryMap};
use wasm_common::LocalFunctionIndex; use wasm_common::LocalFunctionIndex;
use wasmer_compiler::{ use wasmer_compiler::{
JumpTable, JumpTableOffsets, RelocationKind, RelocationTarget, Relocations, SectionBody, JumpTable, JumpTableOffsets, Relocation, RelocationKind, RelocationTarget, Relocations,
SectionIndex, SectionBody, SectionIndex,
}; };
use wasmer_runtime::Module; use wasmer_runtime::Module;
use wasmer_runtime::VMFunctionBody; use wasmer_runtime::VMFunctionBody;
fn apply_relocation(
body: usize,
r: &Relocation,
allocated_functions: &PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>,
jt_offsets: &PrimaryMap<LocalFunctionIndex, JumpTableOffsets>,
allocated_sections: &PrimaryMap<SectionIndex, SectionBody>,
) {
let target_func_address: usize = match r.reloc_target {
RelocationTarget::LocalFunc(index) => {
let fatptr: *const [VMFunctionBody] = allocated_functions[index];
fatptr as *const VMFunctionBody as usize
}
RelocationTarget::LibCall(libcall) => libcall.function_pointer(),
RelocationTarget::CustomSection(custom_section) => {
allocated_sections[custom_section].as_ptr() as usize
}
RelocationTarget::JumpTable(func_index, jt) => {
let offset = *jt_offsets
.get(func_index)
.and_then(|ofs| ofs.get(JumpTable::new(jt.index())))
.expect("func jump table");
let fatptr: *const [VMFunctionBody] = allocated_functions[func_index];
fatptr as *const VMFunctionBody as usize + offset as usize
}
};
match r.kind {
#[cfg(target_pointer_width = "64")]
RelocationKind::Abs8 => unsafe {
let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
write_unaligned(reloc_address as *mut u64, reloc_delta);
},
#[cfg(target_pointer_width = "32")]
RelocationKind::X86PCRel4 => unsafe {
let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
write_unaligned(reloc_address as *mut u32, reloc_delta);
},
#[cfg(target_pointer_width = "32")]
RelocationKind::X86CallPCRel4 => {
let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
write_unaligned(reloc_address as *mut u32, reloc_delta);
}
RelocationKind::X86PCRelRodata4 => {}
_ => panic!("Relocation kind unsupported in the current architecture"),
}
}
/// Links a module, patching the allocated functions with the /// Links a module, patching the allocated functions with the
/// required relocations and jump tables. /// required relocations and jump tables.
pub fn link_module( pub fn link_module(
module: &Module, _module: &Module,
allocated_functions: &PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>, allocated_functions: &PrimaryMap<LocalFunctionIndex, *mut [VMFunctionBody]>,
jt_offsets: &PrimaryMap<LocalFunctionIndex, JumpTableOffsets>, jt_offsets: &PrimaryMap<LocalFunctionIndex, JumpTableOffsets>,
relocations: Relocations, function_relocations: Relocations,
allocated_sections: &PrimaryMap<SectionIndex, SectionBody>, allocated_sections: &PrimaryMap<SectionIndex, SectionBody>,
section_relocations: &PrimaryMap<SectionIndex, Vec<Relocation>>,
) { ) {
for (i, function_relocs) in relocations.into_iter() { for (i, section_relocs) in section_relocations.iter() {
let body = allocated_sections[i].as_ptr() as usize;
for r in section_relocs {
apply_relocation(body, r, allocated_functions, jt_offsets, allocated_sections);
}
}
for (i, function_relocs) in function_relocations.into_iter() {
let fatptr: *const [VMFunctionBody] = allocated_functions[i];
let body = fatptr as *const VMFunctionBody as usize;
for r in function_relocs { for r in function_relocs {
let target_func_address: usize = match r.reloc_target { apply_relocation(body, r, allocated_functions, jt_offsets, allocated_sections);
RelocationTarget::LocalFunc(index) => {
let fatptr: *const [VMFunctionBody] = allocated_functions[index];
fatptr as *const VMFunctionBody as usize
}
RelocationTarget::LibCall(libcall) => libcall.function_pointer(),
RelocationTarget::CustomSection(custom_section) => {
allocated_sections[custom_section].as_ptr() as usize
}
RelocationTarget::JumpTable(func_index, jt) => {
let offset = *jt_offsets
.get(func_index)
.and_then(|ofs| ofs.get(JumpTable::new(jt.index())))
.expect("func jump table");
let fatptr: *const [VMFunctionBody] = allocated_functions[func_index];
fatptr as *const VMFunctionBody as usize + offset as usize
}
};
let fatptr: *const [VMFunctionBody] = allocated_functions[i];
let body = fatptr as *const VMFunctionBody;
match r.kind {
#[cfg(target_pointer_width = "64")]
RelocationKind::Abs8 => unsafe {
let (reloc_address, reloc_delta) =
r.for_address(body as usize, target_func_address as u64);
write_unaligned(reloc_address as *mut u64, reloc_delta);
},
#[cfg(target_pointer_width = "32")]
RelocationKind::X86PCRel4 => unsafe {
let (reloc_address, reloc_delta) =
r.for_address(body as usize, target_func_address as u64);
write_unaligned(reloc_address as *mut u32, reloc_delta);
},
#[cfg(target_pointer_width = "32")]
RelocationKind::X86CallPCRel4 => {
let (reloc_address, reloc_delta) =
r.for_address(body as usize, target_func_address as u64);
write_unaligned(reloc_address as *mut u32, reloc_delta);
}
RelocationKind::X86CallPLTRel4 => unsafe {
let (reloc_address, reloc_delta) =
r.for_address(body as usize, target_func_address as u64);
write_unaligned(reloc_address as *mut u32, reloc_delta as u32);
},
RelocationKind::X86PCRelRodata4 => {}
_ => panic!("Relocation kind unsupported in the current architecture"),
}
} }
} }
} }

View File

@@ -4,25 +4,23 @@
use crate::engine::{JITEngine, JITEngineInner}; use crate::engine::{JITEngine, JITEngineInner};
use crate::link::link_module; use crate::link::link_module;
use crate::serialize::{SerializableCompilation, SerializableModule}; use crate::serialize::{SerializableCompilation, SerializableModule};
use serde::{Deserialize, Serialize};
use std::any::Any; use std::any::Any;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use wasm_common::entity::{BoxedSlice, EntityRef, PrimaryMap}; use wasm_common::entity::{BoxedSlice, PrimaryMap};
use wasm_common::{ use wasm_common::{
DataInitializer, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, DataInitializer, FunctionIndex, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer,
MemoryIndex, OwnedDataInitializer, SignatureIndex, TableIndex, SignatureIndex, TableIndex,
}; };
use wasmer_compiler::CompileError; use wasmer_compiler::CompileError;
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
use wasmer_compiler::ModuleEnvironment; use wasmer_compiler::ModuleEnvironment;
use wasmer_engine::{ use wasmer_engine::{
register_frame_info, resolve_imports, CompiledModule as BaseCompiledModule, DeserializeError, register_frame_info, resolve_imports, CompiledModule as BaseCompiledModule, DeserializeError,
Engine, GlobalFrameInfoRegistration, InstantiationError, LinkError, Resolver, RuntimeError, Engine, GlobalFrameInfoRegistration, InstantiationError, Resolver, RuntimeError,
SerializableFunctionFrameInfo, SerializeError, Tunables, SerializableFunctionFrameInfo, SerializeError,
}; };
use wasmer_runtime::{ use wasmer_runtime::{
InstanceHandle, LinearMemory, Module, SignatureRegistry, Table, VMFunctionBody, InstanceHandle, Module, SignatureRegistry, VMFunctionBody, VMSharedSignatureIndex,
VMGlobalDefinition, VMSharedSignatureIndex,
}; };
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
@@ -32,6 +30,7 @@ pub struct CompiledModule {
serializable: SerializableModule, serializable: SerializableModule,
finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>, finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>,
finished_dynamic_function_trampolines: BoxedSlice<FunctionIndex, *const VMFunctionBody>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>, signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
frame_info_registration: Mutex<Option<Option<GlobalFrameInfoRegistration>>>, frame_info_registration: Mutex<Option<Option<GlobalFrameInfoRegistration>>>,
} }
@@ -44,9 +43,7 @@ impl CompiledModule {
let mut jit_compiler = jit.compiler_mut(); let mut jit_compiler = jit.compiler_mut();
let tunables = jit.tunables(); let tunables = jit.tunables();
let translation = environ let translation = environ.translate(data).map_err(CompileError::Wasm)?;
.translate(data)
.map_err(|error| CompileError::Wasm(error))?;
let memory_plans: PrimaryMap<MemoryIndex, MemoryPlan> = translation let memory_plans: PrimaryMap<MemoryIndex, MemoryPlan> = translation
.module .module
@@ -79,11 +76,14 @@ impl CompiledModule {
.values() .values()
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let trampolines = compiler let function_call_trampolines = compiler
.compile_wasm_trampolines(&func_types)? .compile_function_call_trampolines(&func_types)?
.into_iter() .into_iter()
.collect::<PrimaryMap<SignatureIndex, _>>(); .collect::<PrimaryMap<SignatureIndex, _>>();
let dynamic_function_trampolines =
compiler.compile_dynamic_function_trampolines(&translation.module)?;
let data_initializers = translation let data_initializers = translation
.data_initializers .data_initializers
.iter() .iter()
@@ -102,8 +102,10 @@ impl CompiledModule {
function_relocations: compilation.get_relocations(), function_relocations: compilation.get_relocations(),
function_jt_offsets: compilation.get_jt_offsets(), function_jt_offsets: compilation.get_jt_offsets(),
function_frame_info: frame_infos, function_frame_info: frame_infos,
trampolines, function_call_trampolines,
dynamic_function_trampolines,
custom_sections: compilation.get_custom_sections(), custom_sections: compilation.get_custom_sections(),
custom_section_relocations: compilation.get_custom_section_relocations(),
}; };
let serializable = SerializableModule { let serializable = SerializableModule {
compilation: serializable_compilation, compilation: serializable_compilation,
@@ -134,15 +136,14 @@ impl CompiledModule {
} }
/// Deserialize a CompiledModule /// Deserialize a CompiledModule
pub fn deserialize(jit: &JITEngine, bytes: &[u8]) -> Result<CompiledModule, DeserializeError> { pub fn deserialize(jit: &JITEngine, bytes: &[u8]) -> Result<Self, DeserializeError> {
// let r = flexbuffers::Reader::get_root(bytes).map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?; // let r = flexbuffers::Reader::get_root(bytes).map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?;
// let serializable = SerializableModule::deserialize(r).map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?; // let serializable = SerializableModule::deserialize(r).map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?;
let serializable: SerializableModule = bincode::deserialize(bytes) let serializable: SerializableModule = bincode::deserialize(bytes)
.map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?; .map_err(|e| DeserializeError::CorruptedBinary(format!("{:?}", e)))?;
Self::from_parts(&mut jit.compiler_mut(), serializable) Self::from_parts(&mut jit.compiler_mut(), serializable).map_err(DeserializeError::Compiler)
.map_err(|e| DeserializeError::Compiler(e))
} }
/// Construct a `CompiledModule` from component parts. /// Construct a `CompiledModule` from component parts.
@@ -150,10 +151,11 @@ impl CompiledModule {
jit_compiler: &mut JITEngineInner, jit_compiler: &mut JITEngineInner,
serializable: SerializableModule, serializable: SerializableModule,
) -> Result<Self, CompileError> { ) -> Result<Self, CompileError> {
let finished_functions = jit_compiler.allocate( let (finished_functions, finished_dynamic_function_trampolines) = jit_compiler.allocate(
&serializable.module, &serializable.module,
&serializable.compilation.function_bodies, &serializable.compilation.function_bodies,
&serializable.compilation.trampolines, &serializable.compilation.function_call_trampolines,
&serializable.compilation.dynamic_function_trampolines,
)?; )?;
link_module( link_module(
@@ -162,6 +164,7 @@ impl CompiledModule {
&serializable.compilation.function_jt_offsets, &serializable.compilation.function_jt_offsets,
serializable.compilation.function_relocations.clone(), serializable.compilation.function_relocations.clone(),
&serializable.compilation.custom_sections, &serializable.compilation.custom_sections,
&serializable.compilation.custom_section_relocations,
); );
// Compute indices into the shared signature table. // Compute indices into the shared signature table.
@@ -181,6 +184,8 @@ impl CompiledModule {
Ok(Self { Ok(Self {
serializable, serializable,
finished_functions: finished_functions.into_boxed_slice(), finished_functions: finished_functions.into_boxed_slice(),
finished_dynamic_function_trampolines: finished_dynamic_function_trampolines
.into_boxed_slice(),
signatures: signatures.into_boxed_slice(), signatures: signatures.into_boxed_slice(),
frame_info_registration: Mutex::new(None), frame_info_registration: Mutex::new(None),
}) })
@@ -212,6 +217,7 @@ impl CompiledModule {
&self.module(), &self.module(),
&sig_registry, &sig_registry,
resolver, resolver,
&self.finished_dynamic_function_trampolines,
self.memory_plans(), self.memory_plans(),
self.table_plans(), self.table_plans(),
) )

View File

@@ -2,7 +2,8 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::PrimaryMap;
use wasm_common::{ use wasm_common::{
Features, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex, TableIndex, Features, FunctionIndex, LocalFunctionIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex,
TableIndex,
}; };
use wasmer_compiler::{FunctionBody, JumpTableOffsets, Relocation, SectionBody, SectionIndex}; use wasmer_compiler::{FunctionBody, JumpTableOffsets, Relocation, SectionBody, SectionIndex};
use wasmer_engine::SerializableFunctionFrameInfo; use wasmer_engine::SerializableFunctionFrameInfo;
@@ -19,8 +20,10 @@ pub struct SerializableCompilation {
// to allow lazy frame_info deserialization, we convert it to it's lazy binary // to allow lazy frame_info deserialization, we convert it to it's lazy binary
// format upon serialization. // format upon serialization.
pub function_frame_info: PrimaryMap<LocalFunctionIndex, SerializableFunctionFrameInfo>, pub function_frame_info: PrimaryMap<LocalFunctionIndex, SerializableFunctionFrameInfo>,
pub trampolines: PrimaryMap<SignatureIndex, FunctionBody>, pub function_call_trampolines: PrimaryMap<SignatureIndex, FunctionBody>,
pub dynamic_function_trampolines: PrimaryMap<FunctionIndex, FunctionBody>,
pub custom_sections: PrimaryMap<SectionIndex, SectionBody>, pub custom_sections: PrimaryMap<SectionIndex, SectionBody>,
pub custom_section_relocations: PrimaryMap<SectionIndex, Vec<Relocation>>,
} }
/// Serializable struct that is able to serialize from and to /// Serializable struct that is able to serialize from and to

View File

@@ -151,7 +151,7 @@ impl Engine for NativeEngine {
} }
/// Retrieves a trampoline given a signature /// Retrieves a trampoline given a signature
fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> { fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.inner().trampoline(sig) self.inner().trampoline(sig)
} }

View File

@@ -15,7 +15,7 @@ use tempfile::NamedTempFile;
use wasm_common::entity::{BoxedSlice, EntityRef, PrimaryMap}; use wasm_common::entity::{BoxedSlice, EntityRef, PrimaryMap};
use wasm_common::{ use wasm_common::{
DataInitializer, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, DataInitializer, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
MemoryIndex, OwnedDataInitializer, SignatureIndex, TableIndex, MemoryIndex, OwnedDataInitializer, SignatureIndex, TableIndex, FunctionIndex,
}; };
use wasmer_compiler::CompileError; use wasmer_compiler::CompileError;
#[cfg(feature = "compiler")] #[cfg(feature = "compiler")]
@@ -38,6 +38,7 @@ pub struct NativeModule {
metadata: ModuleMetadata, metadata: ModuleMetadata,
library: Library, library: Library,
finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>, finished_functions: BoxedSlice<LocalFunctionIndex, *mut [VMFunctionBody]>,
finished_dynamic_function_trampolines: BoxedSlice<FunctionIndex, *const VMFunctionBody>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>, signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
} }
@@ -83,18 +84,22 @@ impl NativeModule {
table_plans.clone(), table_plans.clone(),
)?; )?;
// Compile the trampolines // Compile the function call trampolines
let func_types = translation let func_types = translation
.module .module
.signatures .signatures
.values() .values()
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let trampolines = compiler let function_call_trampolines = compiler
.compile_wasm_trampolines(&func_types)? .compile_function_call_trampolines(&func_types)?
.into_iter() .into_iter()
.collect::<PrimaryMap<SignatureIndex, _>>(); .collect::<PrimaryMap<SignatureIndex, _>>();
// Compile the dynamic function trampolines
let dynamic_function_trampolines = compiler
.compile_dynamic_function_trampolines(&translation.module)?;
let data_initializers = translation let data_initializers = translation
.data_initializers .data_initializers
.iter() .iter()
@@ -168,7 +173,7 @@ impl NativeModule {
// } // }
// Add functions // Add functions
for (function_local_index, function) in function_bodies.iter() { for (function_local_index, function) in function_bodies.into_iter() {
let function_name = Self::get_function_name(&metadata, function_local_index); let function_name = Self::get_function_name(&metadata, function_local_index);
obj.declare(&function_name, Decl::function().global()) obj.declare(&function_name, Decl::function().global())
.map_err(to_compile_error)?; .map_err(to_compile_error)?;
@@ -176,9 +181,18 @@ impl NativeModule {
.map_err(to_compile_error)?; .map_err(to_compile_error)?;
} }
// Add trampolines // Add function call trampolines
for (signature_index, function) in trampolines.iter() { for (signature_index, function) in function_call_trampolines.into_iter() {
let function_name = Self::get_trampoline_name(&metadata, signature_index); let function_name = Self::get_function_call_trampoline_name(&metadata, signature_index);
obj.declare(&function_name, Decl::function().global())
.map_err(to_compile_error)?;
obj.define(&function_name, function.body.clone())
.map_err(to_compile_error)?;
}
// Add dynamic function trampolines
for (func_index, function) in dynamic_function_trampolines.into_iter() {
let function_name = Self::get_dynamic_function_trampoline_name(&metadata, func_index);
obj.declare(&function_name, Decl::function().global()) obj.declare(&function_name, Decl::function().global())
.map_err(to_compile_error)?; .map_err(to_compile_error)?;
obj.define(&function_name, function.body.clone()) obj.define(&function_name, function.body.clone())
@@ -267,8 +281,12 @@ impl NativeModule {
format!("wasmer_function_{}_{}", metadata.prefix, index.index()) format!("wasmer_function_{}_{}", metadata.prefix, index.index())
} }
fn get_trampoline_name(metadata: &ModuleMetadata, index: SignatureIndex) -> String { fn get_function_call_trampoline_name(metadata: &ModuleMetadata, index: SignatureIndex) -> String {
format!("wasmer_trampoline_{}_{}", metadata.prefix, index.index()) format!("wasmer_trampoline_function_call_{}_{}", metadata.prefix, index.index())
}
fn get_dynamic_function_trampoline_name(metadata: &ModuleMetadata, index: FunctionIndex) -> String {
format!("wasmer_trampoline_dynamic_function_{}_{}", metadata.prefix, index.index())
} }
/// Construct a `NativeModule` from component parts. /// Construct a `NativeModule` from component parts.
@@ -299,6 +317,39 @@ impl NativeModule {
} }
} }
// Retrieve function call trampolines (for all signatures in the module)
for (sig_index, func_type) in metadata.module.signatures.iter() {
let function_name = Self::get_function_call_trampoline_name(&metadata, sig_index);
unsafe {
let trampoline: Symbol<VMTrampoline> = lib
.get(function_name.as_bytes())
.map_err(to_compile_error)?;
engine_inner.add_trampoline(&func_type, *trampoline);
}
}
// Retrieve dynamic function trampolines (only for imported functions)
let mut finished_dynamic_function_trampolines: PrimaryMap<FunctionIndex, *const VMFunctionBody> =
PrimaryMap::with_capacity(metadata.module.num_imported_funcs);
for func_index in metadata.module.functions.keys().take(metadata.module.num_imported_funcs) {
let function_name = Self::get_dynamic_function_trampoline_name(&metadata, func_index);
unsafe {
let trampoline: Symbol<*const VMFunctionBody> = lib
.get(function_name.as_bytes())
.map_err(to_compile_error)?;
finished_dynamic_function_trampolines.push(*trampoline);
}
}
// Leaving frame infos from now, as they are not yet used
// however they might be useful for the future.
// let frame_infos = compilation
// .get_frame_info()
// .values()
// .map(|frame_info| SerializableFunctionFrameInfo::Processed(frame_info.clone()))
// .collect::<PrimaryMap<LocalFunctionIndex, _>>();
// Self::from_parts(&mut engine_inner, lib, metadata, )
// Compute indices into the shared signature table. // Compute indices into the shared signature table.
let signatures = { let signatures = {
let signature_registry = engine_inner.signatures(); let signature_registry = engine_inner.signatures();
@@ -310,29 +361,13 @@ impl NativeModule {
.collect::<PrimaryMap<_, _>>() .collect::<PrimaryMap<_, _>>()
}; };
for (sig_index, func_type) in metadata.module.signatures.iter() {
let function_name = Self::get_trampoline_name(&metadata, sig_index);
unsafe {
let trampoline: Symbol<VMTrampoline> = lib
.get(function_name.as_bytes())
.map_err(to_compile_error)?;
engine_inner.add_trampoline(&func_type, *trampoline);
}
}
// Leaving frame infos from now, as they are not yet used
// however they might be useful for the future.
// let frame_infos = compilation
// .get_frame_info()
// .values()
// .map(|frame_info| SerializableFunctionFrameInfo::Processed(frame_info.clone()))
// .collect::<PrimaryMap<LocalFunctionIndex, _>>();
// Self::from_parts(&mut engine_inner, lib, metadata, )
Ok(Self { Ok(Self {
sharedobject_path, sharedobject_path,
metadata, metadata,
library: lib, library: lib,
finished_functions: finished_functions.into_boxed_slice(), finished_functions: finished_functions.into_boxed_slice(),
finished_dynamic_function_trampolines: finished_dynamic_function_trampolines
.into_boxed_slice(),
signatures: signatures.into_boxed_slice(), signatures: signatures.into_boxed_slice(),
}) })
} }
@@ -410,6 +445,7 @@ impl NativeModule {
&self.module(), &self.module(),
&sig_registry, &sig_registry,
resolver, resolver,
&self.finished_dynamic_function_trampolines,
self.memory_plans(), self.memory_plans(),
self.table_plans(), self.table_plans(),
) )

View File

@@ -15,7 +15,7 @@ use wasmer_runtime::{InstanceHandle, VMSharedSignatureIndex, VMTrampoline};
/// such as: JIT or Native. /// such as: JIT or Native.
pub trait Engine { pub trait Engine {
/// Get the tunables /// Get the tunables
fn tunables(&self) -> &Tunables; fn tunables(&self) -> &dyn Tunables;
/// Register a signature /// Register a signature
fn register_signature(&self, func_type: &FunctionType) -> VMSharedSignatureIndex; fn register_signature(&self, func_type: &FunctionType) -> VMSharedSignatureIndex;
@@ -24,13 +24,13 @@ pub trait Engine {
fn lookup_signature(&self, sig: VMSharedSignatureIndex) -> Option<FunctionType>; fn lookup_signature(&self, sig: VMSharedSignatureIndex) -> Option<FunctionType>;
/// Retrieves a trampoline given a signature /// Retrieves a trampoline given a signature
fn trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline>; fn function_call_trampoline(&self, sig: VMSharedSignatureIndex) -> Option<VMTrampoline>;
/// Validates a WebAssembly module /// Validates a WebAssembly module
fn validate(&self, binary: &[u8]) -> Result<(), CompileError>; fn validate(&self, binary: &[u8]) -> Result<(), CompileError>;
/// Compile a WebAssembly binary /// Compile a WebAssembly binary
fn compile(&self, binary: &[u8]) -> Result<Arc<CompiledModule>, CompileError>; fn compile(&self, binary: &[u8]) -> Result<Arc<dyn CompiledModule>, CompileError>;
/// Instantiates a WebAssembly module /// Instantiates a WebAssembly module
unsafe fn instantiate( unsafe fn instantiate(
@@ -50,13 +50,13 @@ pub trait Engine {
fn serialize(&self, compiled_module: &dyn CompiledModule) -> Result<Vec<u8>, SerializeError>; fn serialize(&self, compiled_module: &dyn CompiledModule) -> Result<Vec<u8>, SerializeError>;
/// Deserializes a WebAssembly module /// Deserializes a WebAssembly module
fn deserialize(&self, bytes: &[u8]) -> Result<Arc<CompiledModule>, DeserializeError>; fn deserialize(&self, bytes: &[u8]) -> Result<Arc<dyn CompiledModule>, DeserializeError>;
/// Deserializes a WebAssembly module from a path /// Deserializes a WebAssembly module from a path
fn deserialize_from_file( fn deserialize_from_file(
&self, &self,
file_ref: &Path, file_ref: &Path,
) -> Result<Arc<CompiledModule>, DeserializeError> { ) -> Result<Arc<dyn CompiledModule>, DeserializeError> {
let bytes = std::fs::read(file_ref)?; let bytes = std::fs::read(file_ref)?;
self.deserialize(&bytes) self.deserialize(&bytes)
} }

View File

@@ -1,6 +1,3 @@
use crate::error::InstantiationError;
use std::sync::Arc;
use wasmer_runtime::InstanceHandle;
use wasmer_runtime::Module; use wasmer_runtime::Module;
use downcast_rs::{impl_downcast, Downcast}; use downcast_rs::{impl_downcast, Downcast};

View File

@@ -3,11 +3,11 @@
use crate::error::{ImportError, LinkError}; use crate::error::{ImportError, LinkError};
use more_asserts::assert_ge; use more_asserts::assert_ge;
use wasm_common::entity::PrimaryMap; use wasm_common::entity::{BoxedSlice, EntityRef, PrimaryMap};
use wasm_common::{ExternType, ImportIndex, MemoryIndex, TableIndex}; use wasm_common::{ExternType, FunctionIndex, ImportIndex, MemoryIndex, TableIndex};
use wasmer_runtime::{ use wasmer_runtime::{
Export, Imports, SignatureRegistry, VMFunctionImport, VMGlobalImport, VMMemoryImport, Export, Imports, SignatureRegistry, VMFunctionBody, VMFunctionImport, VMFunctionKind,
VMTableImport, VMGlobalImport, VMMemoryImport, VMTableImport,
}; };
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
@@ -43,15 +43,15 @@ fn get_extern_from_import(module: &Module, import_index: &ImportIndex) -> Extern
ExternType::Function(func) ExternType::Function(func)
} }
ImportIndex::Table(index) => { ImportIndex::Table(index) => {
let table = module.tables[*index].clone(); let table = module.tables[*index];
ExternType::Table(table) ExternType::Table(table)
} }
ImportIndex::Memory(index) => { ImportIndex::Memory(index) => {
let memory = module.memories[*index].clone(); let memory = module.memories[*index];
ExternType::Memory(memory) ExternType::Memory(memory)
} }
ImportIndex::Global(index) => { ImportIndex::Global(index) => {
let global = module.globals[*index].clone(); let global = module.globals[*index];
ExternType::Global(global) ExternType::Global(global)
} }
} }
@@ -65,19 +65,19 @@ fn get_extern_from_export(
) -> ExternType { ) -> ExternType {
match export { match export {
Export::Function(ref f) => { Export::Function(ref f) => {
let func = signatures.lookup(f.signature).unwrap().clone(); let func = signatures.lookup(f.signature).unwrap();
ExternType::Function(func) ExternType::Function(func)
} }
Export::Table(ref t) => { Export::Table(ref t) => {
let table = t.plan().table.clone(); let table = t.plan().table;
ExternType::Table(table) ExternType::Table(table)
} }
Export::Memory(ref m) => { Export::Memory(ref m) => {
let memory = m.plan().memory.clone(); let memory = m.plan().memory;
ExternType::Memory(memory) ExternType::Memory(memory)
} }
Export::Global(ref g) => { Export::Global(ref g) => {
let global = g.global.clone(); let global = g.global;
ExternType::Global(global) ExternType::Global(global)
} }
} }
@@ -91,6 +91,7 @@ pub fn resolve_imports(
module: &Module, module: &Module,
signatures: &SignatureRegistry, signatures: &SignatureRegistry,
resolver: &dyn Resolver, resolver: &dyn Resolver,
finished_dynamic_function_trampolines: &BoxedSlice<FunctionIndex, *const VMFunctionBody>,
memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>, memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>,
_table_plans: &PrimaryMap<TableIndex, TablePlan>, _table_plans: &PrimaryMap<TableIndex, TablePlan>,
) -> Result<Imports, LinkError> { ) -> Result<Imports, LinkError> {
@@ -122,8 +123,21 @@ pub fn resolve_imports(
} }
match resolved { match resolved {
Export::Function(ref f) => { Export::Function(ref f) => {
let address = match f.kind {
VMFunctionKind::Dynamic => {
// If this is a dynamic imported function,
// the address of the funciton is the address of the
// reverse trampoline.
let index = FunctionIndex::new(function_imports.len());
finished_dynamic_function_trampolines[index]
// TODO: We should check that the f.vmctx actually matches
// the shape of `VMDynamicFunctionImportContext`
}
VMFunctionKind::Static => f.address,
};
function_imports.push(VMFunctionImport { function_imports.push(VMFunctionImport {
body: f.address, body: address,
vmctx: f.vmctx, vmctx: f.vmctx,
}); });
} }

View File

@@ -100,7 +100,7 @@ impl<'de> Deserialize<'de> for SerializableFunctionFrameInfo {
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
Ok(SerializableFunctionFrameInfo::Unprocessed( Ok(Self::Unprocessed(
deserializer.deserialize_byte_buf(FunctionFrameInfoVisitor)?, deserializer.deserialize_byte_buf(FunctionFrameInfoVisitor)?,
)) ))
} }

View File

@@ -124,12 +124,16 @@ impl RuntimeError {
.any(|pc| info.should_process_frame(*pc).unwrap_or(false)) .any(|pc| info.should_process_frame(*pc).unwrap_or(false))
{ {
// We drop the read lock, to get a write one. // We drop the read lock, to get a write one.
// Note: this is not guaranteed because it's a RwLock:
// the following code may cause deadlocks.
// TODO: clean up this code
drop(info); drop(info);
let mut info = FRAME_INFO.write().unwrap(); {
for pc in frames.iter() { let mut info = FRAME_INFO.write().unwrap();
drop(info.maybe_process_frame(*pc)); for pc in frames.iter() {
info.maybe_process_frame(*pc);
}
} }
drop(info);
FRAME_INFO.read().unwrap() FRAME_INFO.read().unwrap()
} else { } else {
info info
@@ -182,7 +186,7 @@ impl fmt::Display for RuntimeError {
for frame in self.trace().iter() { for frame in self.trace().iter() {
let name = frame.module_name(); let name = frame.module_name();
let func_index = frame.func_index(); let func_index = frame.func_index();
writeln!(f, "")?; writeln!(f)?;
write!(f, " at ")?; write!(f, " at ")?;
match frame.func_name() { match frame.func_name() {
Some(name) => match rustc_demangle::try_demangle(name) { Some(name) => match rustc_demangle::try_demangle(name) {

View File

@@ -66,7 +66,7 @@ impl ModuleFrameInfo {
} }
fn process_function_debug_info(&mut self, local_index: LocalFunctionIndex) { fn process_function_debug_info(&mut self, local_index: LocalFunctionIndex) {
let mut func = self.frame_infos.get_mut(local_index).unwrap(); let func = self.frame_infos.get_mut(local_index).unwrap();
let processed: CompiledFunctionFrameInfo = match func { let processed: CompiledFunctionFrameInfo = match func {
SerializableFunctionFrameInfo::Processed(_) => { SerializableFunctionFrameInfo::Processed(_) => {
// This should be a no-op on processed info // This should be a no-op on processed info
@@ -187,7 +187,8 @@ impl GlobalFrameInfo {
pub fn maybe_process_frame(&mut self, pc: usize) -> Option<()> { pub fn maybe_process_frame(&mut self, pc: usize) -> Option<()> {
let module = self.module_info_mut(pc)?; let module = self.module_info_mut(pc)?;
let func = module.function_info(pc)?; let func = module.function_info(pc)?;
module.process_function_debug_info(func.local_index); let func_local_index = func.local_index;
module.process_function_debug_info(func_local_index);
Some(()) Some(())
} }
@@ -246,7 +247,7 @@ pub fn register(
}; };
assert!(functions.insert(end, func).is_none()); assert!(functions.insert(end, func).is_none());
} }
if functions.len() == 0 { if functions.is_empty() {
return None; return None;
} }
@@ -266,7 +267,7 @@ pub fn register(
ModuleFrameInfo { ModuleFrameInfo {
start: min, start: min,
functions, functions,
module: module.clone(), module,
frame_infos, frame_infos,
}, },
); );

View File

@@ -1,9 +1,10 @@
use crate::error::LinkError; use crate::error::LinkError;
use wasm_common::entity::{EntityRef, PrimaryMap}; use wasm_common::entity::{EntityRef, PrimaryMap};
use wasm_common::{ use wasm_common::{
GlobalIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex, MemoryType, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex, MemoryType, TableIndex,
TableIndex, TableType, TableType,
}; };
use wasmer_runtime::MemoryError;
use wasmer_runtime::{LinearMemory, Module, Table, VMGlobalDefinition}; use wasmer_runtime::{LinearMemory, Module, Table, VMGlobalDefinition};
use wasmer_runtime::{MemoryPlan, TablePlan}; use wasmer_runtime::{MemoryPlan, TablePlan};
@@ -16,7 +17,7 @@ pub trait Tunables {
fn table_plan(&self, table: TableType) -> TablePlan; fn table_plan(&self, table: TableType) -> TablePlan;
/// Create a memory given a memory type /// Create a memory given a memory type
fn create_memory(&self, memory_type: MemoryPlan) -> Result<LinearMemory, String>; fn create_memory(&self, memory_type: MemoryPlan) -> Result<LinearMemory, MemoryError>;
/// Create a memory given a memory type /// Create a memory given a memory type
fn create_table(&self, table_type: TablePlan) -> Result<Table, String>; fn create_table(&self, table_type: TablePlan) -> Result<Table, String>;
@@ -32,7 +33,10 @@ pub trait Tunables {
PrimaryMap::with_capacity(module.memories.len() - num_imports); PrimaryMap::with_capacity(module.memories.len() - num_imports);
for index in num_imports..module.memories.len() { for index in num_imports..module.memories.len() {
let plan = memory_plans[MemoryIndex::new(index)].clone(); let plan = memory_plans[MemoryIndex::new(index)].clone();
memories.push(self.create_memory(plan).map_err(LinkError::Resource)?); memories.push(
self.create_memory(plan)
.map_err(|e| LinkError::Resource(format!("Failed to create memory: {}", e)))?,
);
} }
Ok(memories) Ok(memories)
} }

View File

@@ -13,7 +13,7 @@ edition = "2018"
[dependencies] [dependencies]
wasm-common = { path = "../wasm-common", version = "0.16.2", features = ["enable-serde"] } wasm-common = { path = "../wasm-common", version = "0.16.2", features = ["enable-serde"] }
region = "2.1.2" region = "2.1.2"
libc = { version = "0.2", default-features = false } libc = { version = "0.2.70", default-features = false }
memoffset = "0.5.4" memoffset = "0.5.4"
indexmap = { version = "1.3.2", features = ["serde-1"] } indexmap = { version = "1.3.2", features = ["serde-1"] }
thiserror = "1.0.16" thiserror = "1.0.16"

View File

@@ -1,3 +1,5 @@
//! Runtime build script compiles C code using setjmp for trap handling.
fn main() { fn main() {
println!("cargo:rerun-if-changed=src/trap/helpers.c"); println!("cargo:rerun-if-changed=src/trap/helpers.c");
cc::Build::new() cc::Build::new()

View File

@@ -2,8 +2,8 @@ use crate::memory::LinearMemory;
use crate::module::{MemoryPlan, TablePlan}; use crate::module::{MemoryPlan, TablePlan};
use crate::table::Table; use crate::table::Table;
use crate::vmcontext::{ use crate::vmcontext::{
VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMSharedSignatureIndex, VMContext, VMFunctionBody, VMFunctionKind, VMGlobalDefinition, VMMemoryDefinition,
VMTableDefinition, VMSharedSignatureIndex, VMTableDefinition,
}; };
use wasm_common::GlobalType; use wasm_common::GlobalType;
@@ -34,11 +34,13 @@ pub struct ExportFunction {
/// ///
/// Note that this indexes within the module associated with `vmctx`. /// Note that this indexes within the module associated with `vmctx`.
pub signature: VMSharedSignatureIndex, pub signature: VMSharedSignatureIndex,
/// The function kind (it defines how it's the signature that provided `address` have)
pub kind: VMFunctionKind,
} }
impl From<ExportFunction> for Export { impl From<ExportFunction> for Export {
fn from(func: ExportFunction) -> Export { fn from(func: ExportFunction) -> Self {
Export::Function(func) Self::Function(func)
} }
} }
@@ -59,8 +61,8 @@ impl ExportTable {
} }
impl From<ExportTable> for Export { impl From<ExportTable> for Export {
fn from(table: ExportTable) -> Export { fn from(table: ExportTable) -> Self {
Export::Table(table) Self::Table(table)
} }
} }
@@ -81,8 +83,8 @@ impl ExportMemory {
} }
impl From<ExportMemory> for Export { impl From<ExportMemory> for Export {
fn from(memory: ExportMemory) -> Export { fn from(memory: ExportMemory) -> Self {
Export::Memory(memory) Self::Memory(memory)
} }
} }
@@ -96,7 +98,7 @@ pub struct ExportGlobal {
} }
impl From<ExportGlobal> for Export { impl From<ExportGlobal> for Export {
fn from(global: ExportGlobal) -> Export { fn from(global: ExportGlobal) -> Self {
Export::Global(global) Self::Global(global)
} }
} }

View File

@@ -1,5 +1,4 @@
use crate::vmcontext::{VMFunctionImport, VMGlobalImport, VMMemoryImport, VMTableImport}; use crate::vmcontext::{VMFunctionImport, VMGlobalImport, VMMemoryImport, VMTableImport};
use std::collections::HashSet;
use wasm_common::entity::{BoxedSlice, PrimaryMap}; use wasm_common::entity::{BoxedSlice, PrimaryMap};
use wasm_common::{FunctionIndex, GlobalIndex, MemoryIndex, TableIndex}; use wasm_common::{FunctionIndex, GlobalIndex, MemoryIndex, TableIndex};

View File

@@ -3,13 +3,13 @@
//! `InstanceHandle` is a reference-counting handle for an `Instance`. //! `InstanceHandle` is a reference-counting handle for an `Instance`.
use crate::export::Export; use crate::export::Export;
use crate::imports::Imports; use crate::imports::Imports;
use crate::memory::LinearMemory; use crate::memory::{LinearMemory, MemoryError};
use crate::table::Table; use crate::table::Table;
use crate::trap::{catch_traps, init_traps, Trap, TrapCode}; use crate::trap::{catch_traps, init_traps, Trap, TrapCode};
use crate::vmcontext::{ use crate::vmcontext::{
VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport,
VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport,
VMTableDefinition, VMTableImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
}; };
use crate::{ExportFunction, ExportGlobal, ExportMemory, ExportTable}; use crate::{ExportFunction, ExportGlobal, ExportMemory, ExportTable};
use crate::{Module, TableElements, VMOffsets}; use crate::{Module, TableElements, VMOffsets};
@@ -18,7 +18,7 @@ use more_asserts::assert_lt;
use std::alloc::{self, Layout}; use std::alloc::{self, Layout};
use std::any::Any; use std::any::Any;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::collections::{HashMap, HashSet}; use std::collections::HashMap;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::sync::Arc; use std::sync::Arc;
use std::{mem, ptr, slice}; use std::{mem, ptr, slice};
@@ -294,6 +294,11 @@ impl Instance {
}; };
ExportFunction { ExportFunction {
address, address,
// Any function received is already static at this point as:
// 1. All locally defined functions in the Wasm have a static signature.
// 2. All the imported functions are already static (because
// they point to the trampolines rather than the dynamic addresses).
kind: VMFunctionKind::Static,
signature, signature,
vmctx, vmctx,
} }
@@ -431,7 +436,7 @@ impl Instance {
&self, &self,
memory_index: LocalMemoryIndex, memory_index: LocalMemoryIndex,
delta: IntoPages, delta: IntoPages,
) -> Option<Pages> ) -> Result<Pages, MemoryError>
where where
IntoPages: Into<Pages>, IntoPages: Into<Pages>,
{ {
@@ -459,7 +464,7 @@ impl Instance {
&self, &self,
memory_index: MemoryIndex, memory_index: MemoryIndex,
delta: IntoPages, delta: IntoPages,
) -> Option<Pages> ) -> Result<Pages, MemoryError>
where where
IntoPages: Into<Pages>, IntoPages: Into<Pages>,
{ {
@@ -815,6 +820,7 @@ impl InstanceHandle {
vmctx: VMContext {}, vmctx: VMContext {},
}; };
let layout = instance.alloc_layout(); let layout = instance.alloc_layout();
#[allow(clippy::cast_ptr_alignment)]
let instance_ptr = alloc::alloc(layout) as *mut Instance; let instance_ptr = alloc::alloc(layout) as *mut Instance;
if instance_ptr.is_null() { if instance_ptr.is_null() {
alloc::handle_alloc_error(layout); alloc::handle_alloc_error(layout);
@@ -979,7 +985,7 @@ impl InstanceHandle {
&self, &self,
memory_index: LocalMemoryIndex, memory_index: LocalMemoryIndex,
delta: IntoPages, delta: IntoPages,
) -> Option<Pages> ) -> Result<Pages, MemoryError>
where where
IntoPages: Into<Pages>, IntoPages: Into<Pages>,
{ {
@@ -1070,7 +1076,7 @@ fn check_table_init_bounds(instance: &Instance) -> Result<(), Trap> {
let size = usize::try_from(table.size()).unwrap(); let size = usize::try_from(table.size()).unwrap();
if size < start + init.elements.len() { if size < start + init.elements.len() {
return Err(Trap::wasm(TrapCode::TableSetterOutOfBounds).into()); return Err(Trap::wasm(TrapCode::TableSetterOutOfBounds));
} }
} }
@@ -1095,6 +1101,7 @@ fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usi
start start
} }
#[allow(clippy::mut_from_ref)]
/// Return a byte-slice view of a memory's data. /// Return a byte-slice view of a memory's data.
unsafe fn get_memory_slice<'instance>( unsafe fn get_memory_slice<'instance>(
init: &DataInitializer<'_>, init: &DataInitializer<'_>,
@@ -1121,7 +1128,7 @@ fn check_memory_init_bounds(
unsafe { unsafe {
let mem_slice = get_memory_slice(init, instance); let mem_slice = get_memory_slice(init, instance);
if mem_slice.get_mut(start..start + init.data.len()).is_none() { if mem_slice.get_mut(start..start + init.data.len()).is_none() {
return Err(Trap::wasm(TrapCode::HeapSetterOutOfBounds).into()); return Err(Trap::wasm(TrapCode::HeapSetterOutOfBounds));
} }
} }
} }
@@ -1158,7 +1165,7 @@ fn initialize_tables(instance: &Instance) -> Result<(), Trap> {
.checked_add(init.elements.len()) .checked_add(init.elements.len())
.map_or(true, |end| end > table.size() as usize) .map_or(true, |end| end > table.size() as usize)
{ {
return Err(Trap::wasm(TrapCode::TableAccessOutOfBounds).into()); return Err(Trap::wasm(TrapCode::TableAccessOutOfBounds));
} }
for (i, func_idx) in init.elements.iter().enumerate() { for (i, func_idx) in init.elements.iter().enumerate() {
@@ -1213,7 +1220,7 @@ fn initialize_memories(
.checked_add(init.data.len()) .checked_add(init.data.len())
.map_or(true, |end| end > memory.current_length) .map_or(true, |end| end > memory.current_length)
{ {
return Err(Trap::wasm(TrapCode::HeapAccessOutOfBounds).into()); return Err(Trap::wasm(TrapCode::HeapAccessOutOfBounds));
} }
unsafe { unsafe {

View File

@@ -39,7 +39,7 @@ pub mod libcalls;
pub use crate::export::*; pub use crate::export::*;
pub use crate::imports::Imports; pub use crate::imports::Imports;
pub use crate::instance::InstanceHandle; pub use crate::instance::InstanceHandle;
pub use crate::memory::LinearMemory; pub use crate::memory::{LinearMemory, MemoryError};
pub use crate::mmap::Mmap; pub use crate::mmap::Mmap;
pub use crate::module::{ pub use crate::module::{
ExportsIterator, ImportsIterator, MemoryPlan, MemoryStyle, Module, TableElements, TablePlan, ExportsIterator, ImportsIterator, MemoryPlan, MemoryStyle, Module, TableElements, TablePlan,
@@ -50,9 +50,10 @@ pub use crate::sig_registry::SignatureRegistry;
pub use crate::table::Table; pub use crate::table::Table;
pub use crate::trap::*; pub use crate::trap::*;
pub use crate::vmcontext::{ pub use crate::vmcontext::{
VMBuiltinFunctionIndex, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMBuiltinFunctionIndex, VMCallerCheckedAnyfunc, VMContext, VMDynamicFunctionImportContext,
VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMFunctionBody, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
VMTableDefinition, VMTableImport, VMTrampoline, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
VMTrampoline,
}; };
pub use crate::vmoffsets::{TargetSharedSignatureIndex, VMOffsets}; pub use crate::vmoffsets::{TargetSharedSignatureIndex, VMOffsets};

View File

@@ -7,7 +7,34 @@ use crate::module::{MemoryPlan, MemoryStyle};
use crate::vmcontext::VMMemoryDefinition; use crate::vmcontext::VMMemoryDefinition;
use more_asserts::{assert_ge, assert_le}; use more_asserts::{assert_ge, assert_le};
use std::cell::RefCell; use std::cell::RefCell;
use wasm_common::Pages; use thiserror::Error;
use wasm_common::{Bytes, Pages};
/// Error type describing things that can go wrong when operating on Wasm Memories.
#[derive(Error, Debug, Clone, PartialEq, Hash)]
pub enum MemoryError {
/// Low level error with mmap.
#[error("Error when allocating memory: {0}")]
Region(String),
/// The operation would cause the size of the memory to exceed the maximum or would cause
/// an overflow leading to unindexable memory.
#[error("The memory could not grow: current size {} pages, requested increase: {} pages", current.0, attempted_delta.0)]
CouldNotGrow {
/// The current size in pages.
current: Pages,
/// The attempted amount to grow by in pages.
attempted_delta: Pages,
},
/// The memory plan is invalid, e.g. the maximum is smaller than the minimum.
#[error("The memory plan is invalid because {}", reason)]
InvalidMemoryPlan {
/// The reason why the memory plan is invalid.
reason: String,
},
/// A user-defined error value, used for error cases not listed above.
#[error("A user-defined error occurred: {0}")]
Generic(String),
}
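The `thiserror` derive above turns each `#[error(...)]` attribute into the `Display` implementation for the matching variant. A minimal standalone sketch of that mapping, using a local stand-in for `wasm_common::Pages` and the inline `{field:?}` form instead of the trailing-argument form used above:

```rust
use thiserror::Error; // thiserror = "1"

// Local stand-in for `wasm_common::Pages` so the sketch compiles on its own.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Pages(pub u32);

// Trimmed-down mirror of two of the `MemoryError` variants above.
#[derive(Error, Debug)]
pub enum MemoryError {
    #[error("The memory could not grow: current size {current:?}, requested increase: {attempted_delta:?}")]
    CouldNotGrow { current: Pages, attempted_delta: Pages },
    #[error("The memory plan is invalid because {reason}")]
    InvalidMemoryPlan { reason: String },
}

fn main() {
    let err = MemoryError::CouldNotGrow {
        current: Pages(2),
        attempted_delta: Pages(65_534),
    };
    // `Display` comes from the derive:
    // "The memory could not grow: current size Pages(2), requested increase: Pages(65534)"
    println!("{}", err);
}
```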
/// A linear memory instance. /// A linear memory instance.
#[derive(Debug)] #[derive(Debug)]
@@ -40,13 +67,23 @@ struct WasmMmap {
impl LinearMemory { impl LinearMemory {
/// Create a new linear memory instance with specified minimum and maximum number of wasm pages. /// Create a new linear memory instance with specified minimum and maximum number of wasm pages.
pub fn new(plan: &MemoryPlan) -> Result<Self, String> { pub fn new(plan: &MemoryPlan) -> Result<Self, MemoryError> {
// `maximum` cannot be set to more than `65536` pages. // `maximum` cannot be set to more than `65536` pages.
assert_le!(plan.memory.minimum, Pages::max_value()); assert_le!(plan.memory.minimum, Pages::max_value());
assert!( assert!(
plan.memory.maximum.is_none() || plan.memory.maximum.unwrap() <= Pages::max_value() plan.memory.maximum.is_none() || plan.memory.maximum.unwrap() <= Pages::max_value()
); );
if plan.memory.maximum.is_some() && plan.memory.maximum.unwrap() < plan.memory.minimum {
return Err(MemoryError::InvalidMemoryPlan {
reason: format!(
"the maximum ({} pages) is less than the minimum ({} pages)",
plan.memory.maximum.unwrap().0,
plan.memory.minimum.0
),
});
}
let offset_guard_bytes = plan.offset_guard_size as usize; let offset_guard_bytes = plan.offset_guard_size as usize;
// If we have an offset guard, or if we're doing the static memory // If we have an offset guard, or if we're doing the static memory
@@ -71,7 +108,8 @@ impl LinearMemory {
let mapped_bytes = mapped_pages.bytes(); let mapped_bytes = mapped_pages.bytes();
let mmap = WasmMmap { let mmap = WasmMmap {
alloc: Mmap::accessible_reserved(mapped_bytes.0, request_bytes)?, alloc: Mmap::accessible_reserved(mapped_bytes.0, request_bytes)
.map_err(MemoryError::Region)?,
size: plan.memory.minimum, size: plan.memory.minimum,
}; };
@@ -98,7 +136,7 @@ impl LinearMemory {
/// ///
/// Returns `None` if memory can't be grown by the specified amount /// Returns `None` if memory can't be grown by the specified amount
/// of wasm pages. /// of wasm pages.
pub fn grow<IntoPages>(&self, delta: IntoPages) -> Option<Pages> pub fn grow<IntoPages>(&self, delta: IntoPages) -> Result<Pages, MemoryError>
where where
IntoPages: Into<Pages>, IntoPages: Into<Pages>,
{ {
@@ -106,20 +144,24 @@ impl LinearMemory {
let delta: Pages = delta.into(); let delta: Pages = delta.into();
let mut mmap = self.mmap.borrow_mut(); let mut mmap = self.mmap.borrow_mut();
if delta.0 == 0 { if delta.0 == 0 {
return Some(mmap.size); return Ok(mmap.size);
} }
let new_pages = match mmap.size.checked_add(delta) { let new_pages = mmap
Some(new_pages) => new_pages, .size
// Linear memory size overflow. .checked_add(delta)
None => return None, .ok_or_else(|| MemoryError::CouldNotGrow {
}; current: mmap.size,
attempted_delta: delta,
})?;
let prev_pages = mmap.size; let prev_pages = mmap.size;
if let Some(maximum) = self.maximum { if let Some(maximum) = self.maximum {
if new_pages > maximum { if new_pages > maximum {
// Linear memory size would exceed the declared maximum. return Err(MemoryError::CouldNotGrow {
return None; current: mmap.size,
attempted_delta: delta,
});
} }
} }
@@ -128,7 +170,10 @@ impl LinearMemory {
// limit here. // limit here.
if new_pages >= Pages::max_value() { if new_pages >= Pages::max_value() {
// Linear memory size would exceed the index range. // Linear memory size would exceed the index range.
return None; return Err(MemoryError::CouldNotGrow {
current: mmap.size,
attempted_delta: delta,
});
} }
let delta_bytes = delta.bytes().0; let delta_bytes = delta.bytes().0;
@@ -139,9 +184,16 @@ impl LinearMemory {
// If the new size is within the declared maximum, but needs more memory than we // If the new size is within the declared maximum, but needs more memory than we
// have on hand, it's a dynamic heap and it can move. // have on hand, it's a dynamic heap and it can move.
let guard_bytes = self.offset_guard_size; let guard_bytes = self.offset_guard_size;
let request_bytes = new_bytes.checked_add(guard_bytes)?; let request_bytes =
new_bytes
.checked_add(guard_bytes)
.ok_or_else(|| MemoryError::CouldNotGrow {
current: new_pages,
attempted_delta: Bytes(guard_bytes).into(),
})?;
let mut new_mmap = Mmap::accessible_reserved(new_bytes, request_bytes).ok()?; let mut new_mmap =
Mmap::accessible_reserved(new_bytes, request_bytes).map_err(MemoryError::Region)?;
let copy_len = mmap.alloc.len() - self.offset_guard_size; let copy_len = mmap.alloc.len() - self.offset_guard_size;
new_mmap.as_mut_slice()[..copy_len].copy_from_slice(&mmap.alloc.as_slice()[..copy_len]); new_mmap.as_mut_slice()[..copy_len].copy_from_slice(&mmap.alloc.as_slice()[..copy_len]);
@@ -149,12 +201,14 @@ impl LinearMemory {
mmap.alloc = new_mmap; mmap.alloc = new_mmap;
} else if delta_bytes > 0 { } else if delta_bytes > 0 {
// Make the newly allocated pages accessible. // Make the newly allocated pages accessible.
mmap.alloc.make_accessible(prev_bytes, delta_bytes).ok()?; mmap.alloc
.make_accessible(prev_bytes, delta_bytes)
.map_err(MemoryError::Region)?;
} }
mmap.size = new_pages; mmap.size = new_pages;
Some(prev_pages) Ok(prev_pages)
} }
/// Return a `VMMemoryDefinition` for exposing the memory to compiled wasm code. /// Return a `VMMemoryDefinition` for exposing the memory to compiled wasm code.
@@ -170,7 +224,7 @@ impl LinearMemory {
/// ///
/// This function is used in the `wasmer_runtime::Instance` to retrieve /// This function is used in the `wasmer_runtime::Instance` to retrieve
/// the host memory pointer and interact with the host memory directly. /// the host memory pointer and interact with the host memory directly.
pub fn as_mut_ptr(&self) -> *mut LinearMemory { pub fn as_mut_ptr(&self) -> *mut Self {
self as *const LinearMemory as *mut LinearMemory self as *const Self as *mut Self
} }
} }
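Taken together, `grow` now reports every failure path (size overflow, declared maximum exceeded, mmap failures) through `MemoryError` instead of a bare `None`. A standalone toy mirror of the overflow and maximum checks, with illustrative names:

```rust
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
struct Pages(u32);

#[derive(Debug)]
enum MemoryError {
    CouldNotGrow { current: Pages, attempted_delta: Pages },
}

// Toy mirror of the checked-add / maximum checks in `LinearMemory::grow`.
fn grow(size: &mut Pages, maximum: Option<Pages>, delta: Pages) -> Result<Pages, MemoryError> {
    let new_pages = size
        .0
        .checked_add(delta.0)
        .map(Pages)
        .ok_or(MemoryError::CouldNotGrow { current: *size, attempted_delta: delta })?;
    if maximum.map_or(false, |max| new_pages > max) {
        // Growing would exceed the declared maximum.
        return Err(MemoryError::CouldNotGrow { current: *size, attempted_delta: delta });
    }
    let prev_pages = *size;
    *size = new_pages;
    Ok(prev_pages) // like the real `grow`, returns the previous size in pages
}

fn main() {
    let mut size = Pages(1);
    assert_eq!(grow(&mut size, Some(Pages(2)), Pages(1)).unwrap(), Pages(1));
    assert!(grow(&mut size, Some(Pages(2)), Pages(1)).is_err());
}
```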

View File

@@ -258,7 +258,7 @@ impl Module {
} }
ImportIndex::Global(i) => { ImportIndex::Global(i) => {
let global_type = self.globals.get(i.clone()).unwrap(); let global_type = self.globals.get(i.clone()).unwrap();
ExternType::Global(global_type.clone()) ExternType::Global(*global_type)
} }
}; };
ImportType::new(module, field, extern_type) ImportType::new(module, field, extern_type)
@@ -347,7 +347,7 @@ impl Module {
/// Get the Module name /// Get the Module name
pub fn name(&self) -> String { pub fn name(&self) -> String {
match self.name { match self.name {
Some(ref name) => format!("{}", name), Some(ref name) => name.to_string(),
None => "<module>".to_string(), None => "<module>".to_string(),
} }
} }
@@ -387,21 +387,21 @@ impl<I: Iterator<Item = ExportType> + Sized> ExportsIterator<I> {
/// Get only the memories /// Get only the memories
pub fn memories(self) -> impl Iterator<Item = ExportType<MemoryType>> + Sized { pub fn memories(self) -> impl Iterator<Item = ExportType<MemoryType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Memory(ty) => Some(ExportType::new(extern_.name(), ty.clone())), ExternType::Memory(ty) => Some(ExportType::new(extern_.name(), *ty)),
_ => None, _ => None,
}) })
} }
/// Get only the tables /// Get only the tables
pub fn tables(self) -> impl Iterator<Item = ExportType<TableType>> + Sized { pub fn tables(self) -> impl Iterator<Item = ExportType<TableType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Table(ty) => Some(ExportType::new(extern_.name(), ty.clone())), ExternType::Table(ty) => Some(ExportType::new(extern_.name(), *ty)),
_ => None, _ => None,
}) })
} }
/// Get only the globals /// Get only the globals
pub fn globals(self) -> impl Iterator<Item = ExportType<GlobalType>> + Sized { pub fn globals(self) -> impl Iterator<Item = ExportType<GlobalType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Global(ty) => Some(ExportType::new(extern_.name(), ty.clone())), ExternType::Global(ty) => Some(ExportType::new(extern_.name(), *ty)),
_ => None, _ => None,
}) })
} }
@@ -443,33 +443,21 @@ impl<I: Iterator<Item = ImportType> + Sized> ImportsIterator<I> {
/// Get only the memories /// Get only the memories
pub fn memories(self) -> impl Iterator<Item = ImportType<MemoryType>> + Sized { pub fn memories(self) -> impl Iterator<Item = ImportType<MemoryType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Memory(ty) => Some(ImportType::new( ExternType::Memory(ty) => Some(ImportType::new(extern_.module(), extern_.name(), *ty)),
extern_.module(),
extern_.name(),
ty.clone(),
)),
_ => None, _ => None,
}) })
} }
/// Get only the tables /// Get only the tables
pub fn tables(self) -> impl Iterator<Item = ImportType<TableType>> + Sized { pub fn tables(self) -> impl Iterator<Item = ImportType<TableType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Table(ty) => Some(ImportType::new( ExternType::Table(ty) => Some(ImportType::new(extern_.module(), extern_.name(), *ty)),
extern_.module(),
extern_.name(),
ty.clone(),
)),
_ => None, _ => None,
}) })
} }
/// Get only the globals /// Get only the globals
pub fn globals(self) -> impl Iterator<Item = ImportType<GlobalType>> + Sized { pub fn globals(self) -> impl Iterator<Item = ImportType<GlobalType>> + Sized {
self.iter.filter_map(|extern_| match extern_.ty() { self.iter.filter_map(|extern_| match extern_.ty() {
ExternType::Global(ty) => Some(ImportType::new( ExternType::Global(ty) => Some(ImportType::new(extern_.module(), extern_.name(), *ty)),
extern_.module(),
extern_.name(),
ty.clone(),
)),
_ => None, _ => None,
}) })
} }

View File

@@ -156,7 +156,7 @@ impl Table {
/// ///
/// This function is used in the `wasmer_runtime::Instance` to retrieve /// This function is used in the `wasmer_runtime::Instance` to retrieve
/// the host table pointer and interact with the host table directly. /// the host table pointer and interact with the host table directly.
pub fn as_mut_ptr(&self) -> *mut Table { pub fn as_mut_ptr(&self) -> *mut Self {
self as *const Table as *mut Table self as *const Self as *mut Self
} }
} }

View File

@@ -17,21 +17,3 @@ void Unwind(void *JmpBuf) {
jmp_buf *buf = (jmp_buf*) JmpBuf; jmp_buf *buf = (jmp_buf*) JmpBuf;
longjmp(*buf, 1); longjmp(*buf, 1);
} }
#ifdef __APPLE__
#include <sys/ucontext.h>
void* GetPcFromUContext(ucontext_t *cx) {
return (void*) cx->uc_mcontext->__ss.__rip;
}
#endif
#if defined(__linux__) && defined(__aarch64__)
#include <sys/ucontext.h>
void* GetPcFromUContext(ucontext_t *cx) {
return (void*) cx->uc_mcontext.pc;
}
#endif // __linux__ && __aarch64__

View File

@@ -114,9 +114,9 @@ cfg_if::cfg_if! {
// exception was handled by a custom exception handler, so we // exception was handled by a custom exception handler, so we
// keep executing. // keep executing.
if jmp_buf.is_null() { if jmp_buf.is_null() {
return false; false
} else if jmp_buf as usize == 1 { } else if jmp_buf as usize == 1 {
return true; true
} else { } else {
Unwind(jmp_buf) Unwind(jmp_buf)
} }
@@ -161,18 +161,11 @@ cfg_if::cfg_if! {
let cx = &*(cx as *const libc::ucontext_t); let cx = &*(cx as *const libc::ucontext_t);
cx.uc_mcontext.gregs[libc::REG_EIP as usize] as *const u8 cx.uc_mcontext.gregs[libc::REG_EIP as usize] as *const u8
} else if #[cfg(all(target_os = "linux", target_arch = "aarch64"))] { } else if #[cfg(all(target_os = "linux", target_arch = "aarch64"))] {
// libc doesn't seem to support Linux/aarch64 at the moment? let cx = &*(cx as *const libc::ucontext_t);
extern "C" { cx.uc_mcontext.pc as *const u8
fn GetPcFromUContext(cx: *mut libc::c_void) -> *const u8;
}
GetPcFromUContext(cx)
} else if #[cfg(target_os = "macos")] { } else if #[cfg(target_os = "macos")] {
// FIXME(rust-lang/libc#1702) - once that lands and is let cx = &*(cx as *const libc::ucontext_t);
// released we should inline the definition here (*cx.uc_mcontext).__ss.__rip as *const u8
extern "C" {
fn GetPcFromUContext(cx: *mut libc::c_void) -> *const u8;
}
GetPcFromUContext(cx)
} else { } else {
compile_error!("unsupported platform"); compile_error!("unsupported platform");
} }
@@ -356,7 +349,7 @@ impl Trap {
/// Internally saves a backtrace when constructed. /// Internally saves a backtrace when constructed.
pub fn wasm(trap_code: TrapCode) -> Self { pub fn wasm(trap_code: TrapCode) -> Self {
let backtrace = Backtrace::new_unresolved(); let backtrace = Backtrace::new_unresolved();
Trap::Wasm { Self::Wasm {
trap_code, trap_code,
backtrace, backtrace,
} }
@@ -367,7 +360,7 @@ impl Trap {
/// Internally saves a backtrace when constructed. /// Internally saves a backtrace when constructed.
pub fn oom() -> Self { pub fn oom() -> Self {
let backtrace = Backtrace::new_unresolved(); let backtrace = Backtrace::new_unresolved();
Trap::OOM { backtrace } Self::OOM { backtrace }
} }
} }
@@ -447,8 +440,8 @@ enum UnwindReason {
} }
impl CallThreadState { impl CallThreadState {
fn new(vmctx: *mut VMContext) -> CallThreadState { fn new(vmctx: *mut VMContext) -> Self {
CallThreadState { Self {
unwind: Cell::new(UnwindReason::None), unwind: Cell::new(UnwindReason::None),
vmctx, vmctx,
jmp_buf: Cell::new(ptr::null()), jmp_buf: Cell::new(ptr::null()),
@@ -458,7 +451,7 @@ impl CallThreadState {
} }
} }
fn with(mut self, closure: impl FnOnce(&CallThreadState) -> i32) -> Result<(), Trap> { fn with(mut self, closure: impl FnOnce(&Self) -> i32) -> Result<(), Trap> {
tls::with(|prev| { tls::with(|prev| {
self.prev = prev.map(|p| p as *const _); self.prev = prev.map(|p| p as *const _);
let ret = tls::set(&self, || closure(&self)); let ret = tls::set(&self, || closure(&self));
@@ -545,7 +538,7 @@ impl CallThreadState {
}; };
let result = call_handler(&handler); let result = call_handler(&handler);
i.instance().signal_handler.set(Some(handler)); i.instance().signal_handler.set(Some(handler));
return result; result
}) { }) {
self.handling_trap.set(false); self.handling_trap.set(false);
return 1 as *const _; return 1 as *const _;
@@ -715,7 +708,7 @@ fn setup_unix_signalstack() -> Result<(), Trap> {
impl Drop for Tls { impl Drop for Tls {
fn drop(&mut self) { fn drop(&mut self) {
let (ptr, size) = match self { let (ptr, size) = match self {
Tls::Allocated { Self::Allocated {
mmap_ptr, mmap_ptr,
mmap_size, mmap_size,
} => (*mmap_ptr, *mmap_size), } => (*mmap_ptr, *mmap_size),

View File

@@ -46,6 +46,52 @@ mod test_vmfunction_import {
} }
} }
/// The `VMDynamicFunctionImportContext` is the context that dynamic
/// functions will receive when called (rather than `vmctx`).
/// A dynamic function is a function for which we don't know the signature
/// until runtime.
///
/// As such, we need to expose the dynamic function `context`
/// containing the relevant context for running the function indicated
/// in `address`.
#[repr(C)]
pub struct VMDynamicFunctionImportContext<T: Sized> {
/// The address of the inner dynamic function.
///
/// Note: The function must be of the form
/// `(*mut T, *mut VMContext, SignatureIndex, *mut i128)`.
pub address: *const VMFunctionBody,
/// The context that the inner dynamic function will receive.
pub ctx: T,
}
#[cfg(test)]
mod test_vmdynamicfunction_import_context {
use super::VMDynamicFunctionImportContext;
use crate::{Module, VMOffsets};
use memoffset::offset_of;
use std::mem::size_of;
#[test]
fn check_vmdynamicfunction_import_context_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMDynamicFunctionImportContext<usize>>(),
usize::from(offsets.size_of_vmdynamicfunction_import_context())
);
assert_eq!(
offset_of!(VMDynamicFunctionImportContext<usize>, address),
usize::from(offsets.vmdynamicfunction_import_context_address())
);
assert_eq!(
offset_of!(VMDynamicFunctionImportContext<usize>, ctx),
usize::from(offsets.vmdynamicfunction_import_context_ctx())
);
}
}
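Because the struct is `#[repr(C)]` with a leading pointer, its layout is fixed: `address` at offset 0 and `ctx` one pointer width in, which is exactly what the offset test above pins down. A standalone sketch, with `*const u8` standing in for `*const VMFunctionBody`:

```rust
use std::mem::size_of;

#[repr(C)]
struct VMDynamicFunctionImportContext<T: Sized> {
    address: *const u8, // stand-in for *const VMFunctionBody
    ctx: T,
}

fn main() {
    let c = VMDynamicFunctionImportContext { address: std::ptr::null(), ctx: 42usize };
    let base = &c as *const _ as usize;
    // `address` sits at offset 0, `ctx` immediately after the pointer.
    assert_eq!(&c.address as *const _ as usize - base, 0);
    assert_eq!(&c.ctx as *const _ as usize - base, size_of::<*const u8>());
}
```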
/// A placeholder byte-sized type which is just used to provide some amount of type /// A placeholder byte-sized type which is just used to provide some amount of type
/// safety when dealing with pointers to JIT-compiled function bodies. Note that it's /// safety when dealing with pointers to JIT-compiled function bodies. Note that it's
/// deliberately not Copy, as we shouldn't be carelessly copying function body bytes /// deliberately not Copy, as we shouldn't be carelessly copying function body bytes
@@ -64,6 +110,26 @@ mod test_vmfunction_body {
} }
} }
/// A function kind.
#[derive(Debug, Copy, Clone, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
/// A function is static when its address matches the signature:
/// (vmctx, vmctx, arg1, arg2...) -> (result1, result2, ...)
///
/// This is the default for functions that are defined:
/// 1. In the Host, natively
/// 2. In the WebAssembly file
Static,
/// A function is dynamic when its address matches the signature:
/// (ctx, &[Type]) -> Vec<Type>
///
/// This is the default for functions that are defined:
/// 1. In the Host, dynamically
Dynamic,
}
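In other words, a static import is called through a native signature fixed at compile time, while a dynamic import funnels every call through one entry point that receives the arguments as a slice of values. A rough sketch of the two shapes (all names illustrative; the real trampolines pass a `SignatureIndex` and a raw value buffer rather than a `Vec`):

```rust
#[repr(C)]
struct VMContext { _opaque: [u8; 0] }

#[derive(Debug, Clone, Copy, PartialEq)]
enum Value { I32(i32) }

// Static kind: the native signature (vmctx, caller_vmctx, args...) -> results
// is known when the import is compiled.
unsafe extern "C" fn static_add(
    _vmctx: *mut VMContext,
    _caller_vmctx: *mut VMContext,
    a: i32,
    b: i32,
) -> i32 {
    a + b
}

// Dynamic kind: one entry point handles any signature at runtime.
fn dynamic_add(_ctx: &mut (), args: &[Value]) -> Vec<Value> {
    match (args[0], args[1]) {
        (Value::I32(a), Value::I32(b)) => vec![Value::I32(a + b)],
    }
}

fn main() {
    // The vmctx pointers are never dereferenced here, so nulls are fine.
    let r = unsafe { static_add(std::ptr::null_mut(), std::ptr::null_mut(), 1, 2) };
    assert_eq!(r, 3);
    assert_eq!(dynamic_add(&mut (), &[Value::I32(1), Value::I32(2)]), vec![Value::I32(3)]);
}
```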
/// The fields compiled code needs to access to utilize a WebAssembly table /// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance. /// imported from another instance.
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
@@ -321,6 +387,7 @@ mod test_vmtable_definition {
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
#[repr(C, align(16))] #[repr(C, align(16))]
pub struct VMGlobalDefinition { pub struct VMGlobalDefinition {
// TODO: use `UnsafeCell` here, make this not Copy; there's probably a ton of UB in this code right now
storage: [u8; 16], storage: [u8; 16],
// If more elements are added here, remember to add offset_of tests below! // If more elements are added here, remember to add offset_of tests below!
} }

View File

@@ -92,6 +92,30 @@ impl VMOffsets {
} }
} }
/// Offsets for [`VMDynamicFunctionImportContext`].
///
/// [`VMDynamicFunctionImportContext`]: crate::vmcontext::VMDynamicFunctionImportContext
impl VMOffsets {
/// The offset of the `address` field.
#[allow(clippy::erasing_op)]
pub fn vmdynamicfunction_import_context_address(&self) -> u8 {
0 * self.pointer_size
}
/// The offset of the `ctx` field.
#[allow(clippy::identity_op)]
pub fn vmdynamicfunction_import_context_ctx(&self) -> u8 {
1 * self.pointer_size
}
/// Return the size of [`VMDynamicFunctionImportContext`].
///
/// [`VMDynamicFunctionImportContext`]: crate::vmcontext::VMDynamicFunctionImportContext
pub fn size_of_vmdynamicfunction_import_context(&self) -> u8 {
2 * self.pointer_size
}
}
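These three methods encode the same layout as the `#[repr(C)]` struct: with an 8-byte pointer, `address` is at offset 0, `ctx` at offset 8, and the whole context spans two pointer widths (the `2 * pointer_size` size only holds for `ctx` values that fit in one pointer, which is what the offset test with `T = usize` exercises). A toy mirror of the arithmetic:

```rust
struct VMOffsets { pointer_size: u8 }

impl VMOffsets {
    // `address` leads the struct, so its offset is always zero.
    fn vmdynamicfunction_import_context_address(&self) -> u8 { 0 }
    // `ctx` follows immediately after the pointer.
    fn vmdynamicfunction_import_context_ctx(&self) -> u8 { self.pointer_size }
    fn size_of_vmdynamicfunction_import_context(&self) -> u8 { 2 * self.pointer_size }
}

fn main() {
    let offsets = VMOffsets { pointer_size: 8 }; // 64-bit target
    assert_eq!(offsets.vmdynamicfunction_import_context_address(), 0);
    assert_eq!(offsets.vmdynamicfunction_import_context_ctx(), 8);
    assert_eq!(offsets.size_of_vmdynamicfunction_import_context(), 16);
}
```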
/// Offsets for `*const VMFunctionBody`. /// Offsets for `*const VMFunctionBody`.
impl VMOffsets { impl VMOffsets {
/// The size of the `current_elements` field. /// The size of the `current_elements` field.

View File

@@ -14,7 +14,7 @@ bincode = "1"
byteorder = "1.3" byteorder = "1.3"
thiserror = "1" thiserror = "1"
generational-arena = { version = "0.2", features = ["serde"] } generational-arena = { version = "0.2", features = ["serde"] }
libc = "0.2.60" libc = { version = "0.2.70", default-features = false }
tracing = "0.1" tracing = "0.1"
getrandom = "0.1" getrandom = "0.1"
time = "0.1" time = "0.1"

View File

@@ -19,7 +19,6 @@ mod types;
pub use self::builder::*; pub use self::builder::*;
pub use self::types::*; pub use self::types::*;
use crate::syscalls::types::*; use crate::syscalls::types::*;
use crate::WasiEnv;
use generational_arena::Arena; use generational_arena::Arena;
pub use generational_arena::Index as Inode; pub use generational_arena::Index as Inode;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -52,15 +51,6 @@ const STDOUT_DEFAULT_RIGHTS: __wasi_rights_t = __WASI_RIGHT_FD_DATASYNC
| __WASI_RIGHT_POLL_FD_READWRITE; | __WASI_RIGHT_POLL_FD_READWRITE;
const STDERR_DEFAULT_RIGHTS: __wasi_rights_t = STDOUT_DEFAULT_RIGHTS; const STDERR_DEFAULT_RIGHTS: __wasi_rights_t = STDOUT_DEFAULT_RIGHTS;
/// Get WasiState from a Ctx
///
/// # Safety
/// - This function must be called on a `WasiEnv` that was created with `WasiState`
/// in the data field
pub unsafe fn get_wasi_state<'a>(env: &'a mut WasiEnv) -> &'a mut WasiState {
env.state_mut()
}
/// A completely arbitrary "big enough" number used as the upper limit for /// A completely arbitrary "big enough" number used as the upper limit for
/// the number of symlinks that can be traversed when resolving a path /// the number of symlinks that can be traversed when resolving a path
pub const MAX_SYMLINKS: u32 = 128; pub const MAX_SYMLINKS: u32 = 128;

View File

@@ -1001,7 +1001,7 @@ pub fn fd_readdir(
.map(|(name, inode)| { .map(|(name, inode)| {
let entry = &state.fs.inodes[*inode]; let entry = &state.fs.inodes[*inode];
( (
format!("{}", entry.name), entry.name.to_string(),
entry.stat.st_filetype, entry.stat.st_filetype,
entry.stat.st_ino, entry.stat.st_ino,
) )
@@ -1456,7 +1456,7 @@ pub fn path_filestat_get(
flags & __WASI_LOOKUP_SYMLINK_FOLLOW != 0, flags & __WASI_LOOKUP_SYMLINK_FOLLOW != 0,
)); ));
let stat = if state.fs.inodes[file_inode].is_preopened { let stat = if state.fs.inodes[file_inode].is_preopened {
state.fs.inodes[file_inode].stat.clone() state.fs.inodes[file_inode].stat
} else { } else {
wasi_try!(state wasi_try!(state
.fs .fs
@@ -1998,7 +1998,7 @@ pub fn path_remove_directory(
), ),
} }
if let Err(_) = std::fs::remove_dir(path_str) { if std::fs::remove_dir(path_str).is_err() {
// reinsert to prevent FS from being in bad state // reinsert to prevent FS from being in bad state
if let Kind::Dir { if let Kind::Dir {
ref mut entries, .. ref mut entries, ..

View File

@@ -1,4 +1,4 @@
use wasmer::{ExternType, ImportType, Module}; use wasmer::{ExternType, Module};
#[allow(dead_code)] #[allow(dead_code)]
/// Check if a provided module is compiled for some version of WASI. /// Check if a provided module is compiled for some version of WASI.

View File

@@ -55,6 +55,7 @@ entity_impl!(MemoryIndex);
/// Index type of a signature (imported or local) inside the WebAssembly module. /// Index type of a signature (imported or local) inside the WebAssembly module.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct SignatureIndex(u32); pub struct SignatureIndex(u32);
entity_impl!(SignatureIndex); entity_impl!(SignatureIndex);
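`#[repr(transparent)]` pins `SignatureIndex` to exactly the ABI of its single `u32` field, which matters now that the index appears in the dynamic-function trampoline signature shown earlier. A quick standalone check of what the attribute buys:

```rust
use std::mem::{align_of, size_of};

#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq)]
struct SignatureIndex(u32);

fn main() {
    // Same size and alignment as the wrapped u32, so it can cross
    // FFI/trampoline boundaries as a plain 32-bit value.
    assert_eq!(size_of::<SignatureIndex>(), size_of::<u32>());
    assert_eq!(align_of::<SignatureIndex>(), align_of::<u32>());
}
```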

View File

@@ -261,39 +261,26 @@ pub struct FunctionBody(*mut u8);
/// Represents a function that can be used by WebAssembly. /// Represents a function that can be used by WebAssembly.
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Func<Args = (), Rets = (), Env = ()> { pub struct Func<Args = (), Rets = ()> {
address: *const FunctionBody, address: *const FunctionBody,
env: Option<*mut Env>,
_phantom: PhantomData<(Args, Rets)>, _phantom: PhantomData<(Args, Rets)>,
} }
unsafe impl<Args, Rets> Send for Func<Args, Rets> {} unsafe impl<Args, Rets> Send for Func<Args, Rets> {}
impl<Args, Rets, Env> Func<Args, Rets, Env> impl<Args, Rets> Func<Args, Rets>
where where
Args: WasmTypeList, Args: WasmTypeList,
Rets: WasmTypeList, Rets: WasmTypeList,
Env: Sized,
{ {
/// Creates a new `Func`. /// Creates a new `Func`.
pub fn new<F>(func: F) -> Self pub fn new<F, T, E>(func: F) -> Self
where where
F: HostFunction<Args, Rets, WithoutEnv, Env>, F: HostFunction<Args, Rets, T, E>,
T: HostFunctionKind,
E: Sized,
{ {
Self { Self {
env: None,
address: func.to_raw(),
_phantom: PhantomData,
}
}
/// Creates a new `Func` with a given `env`.
pub fn new_env<F>(env: &mut Env, func: F) -> Self
where
F: HostFunction<Args, Rets, WithEnv, Env>,
{
Self {
env: Some(env),
address: func.to_raw(), address: func.to_raw(),
_phantom: PhantomData, _phantom: PhantomData,
} }
@@ -304,11 +291,6 @@ where
FunctionType::new(Args::wasm_types(), Rets::wasm_types()) FunctionType::new(Args::wasm_types(), Rets::wasm_types())
} }
/// Get the type of the Func
pub fn env(&self) -> Option<*mut Env> {
self.env
}
/// Get the address of the Func /// Get the address of the Func
pub fn address(&self) -> *const FunctionBody { pub fn address(&self) -> *const FunctionBody {
self.address self.address
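The env-less and env-taking constructors are thus unified behind one generic `new`, with the `HostFunctionKind` marker (`WithoutEnv`/`WithEnv`) selected by trait resolution instead of a separate `new_env`. A standalone sketch of that pattern, with simplified, illustrative types (not the real wasmer API):

```rust
use std::marker::PhantomData;

trait HostFunctionKind {}
struct WithoutEnv;
struct WithEnv;
impl HostFunctionKind for WithoutEnv {}
impl HostFunctionKind for WithEnv {}

trait HostFunction<T: HostFunctionKind, E> {
    fn to_raw(self) -> *const u8;
}

// Env-less fn pointers pick T = WithoutEnv; an env-taking impl would
// pick T = WithEnv through the same single constructor.
impl HostFunction<WithoutEnv, ()> for fn(i32) -> i32 {
    fn to_raw(self) -> *const u8 { self as *const u8 }
}

struct Func<Args, Rets> {
    address: *const u8,
    _phantom: PhantomData<(Args, Rets)>,
}

impl<Args, Rets> Func<Args, Rets> {
    fn new<F, T, E>(func: F) -> Self
    where
        F: HostFunction<T, E>,
        T: HostFunctionKind,
        E: Sized,
    {
        Self { address: func.to_raw(), _phantom: PhantomData }
    }
}

fn main() {
    // The `as fn(i32) -> i32` cast pins the fn-pointer type so the unique
    // HostFunction impl (and with it T and E) can be inferred.
    let f: Func<i32, i32> = Func::new((|x| x + 1) as fn(i32) -> i32);
    assert!(!f.address.is_null());
}
```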

View File

@@ -78,12 +78,12 @@ impl AnyRef {
any: data, any: data,
host_info: None, host_info: None,
}; };
AnyRef::Other(OtherRef(Rc::new(RefCell::new(info)))) Self::Other(OtherRef(Rc::new(RefCell::new(info))))
} }
/// Creates a `Null` reference. /// Creates a `Null` reference.
pub fn null() -> Self { pub fn null() -> Self {
AnyRef::Null Self::Null
} }
/// Returns the data stored in the reference if available. /// Returns the data stored in the reference if available.
@@ -93,7 +93,7 @@ impl AnyRef {
/// Panics if the variant isn't `AnyRef::Other`. /// Panics if the variant isn't `AnyRef::Other`.
pub fn data(&self) -> cell::Ref<Box<dyn Any>> { pub fn data(&self) -> cell::Ref<Box<dyn Any>> {
match self { match self {
AnyRef::Other(OtherRef(r)) => cell::Ref::map(r.borrow(), |r| &r.any), Self::Other(OtherRef(r)) => cell::Ref::map(r.borrow(), |r| &r.any),
_ => panic!("expected AnyRef::Other"), _ => panic!("expected AnyRef::Other"),
} }
} }
@@ -102,11 +102,9 @@ impl AnyRef {
/// values that compare as equal). /// values that compare as equal).
pub fn ptr_eq(&self, other: &AnyRef) -> bool { pub fn ptr_eq(&self, other: &AnyRef) -> bool {
match (self, other) { match (self, other) {
(AnyRef::Null, AnyRef::Null) => true, (Self::Null, AnyRef::Null) => true,
(AnyRef::Ref(InternalRef(ref a)), AnyRef::Ref(InternalRef(ref b))) => { (Self::Ref(InternalRef(ref a)), Self::Ref(InternalRef(ref b))) => a.ptr_eq(b.as_ref()),
a.ptr_eq(b.as_ref()) (Self::Other(OtherRef(ref a)), Self::Other(OtherRef(ref b))) => Rc::ptr_eq(a, b),
}
(AnyRef::Other(OtherRef(ref a)), AnyRef::Other(OtherRef(ref b))) => Rc::ptr_eq(a, b),
_ => false, _ => false,
} }
} }
@@ -118,9 +116,9 @@ impl AnyRef {
/// Panics if `AnyRef` is already borrowed or `AnyRef` is `Null`. /// Panics if `AnyRef` is already borrowed or `AnyRef` is `Null`.
pub fn host_info(&self) -> Option<cell::RefMut<Box<dyn HostInfo>>> { pub fn host_info(&self) -> Option<cell::RefMut<Box<dyn HostInfo>>> {
match self { match self {
AnyRef::Null => panic!("null"), Self::Null => panic!("null"),
AnyRef::Ref(r) => r.0.host_info(), Self::Ref(r) => r.0.host_info(),
AnyRef::Other(r) => { Self::Other(r) => {
let info = cell::RefMut::map(r.0.borrow_mut(), |b| &mut b.host_info); let info = cell::RefMut::map(r.0.borrow_mut(), |b| &mut b.host_info);
if info.is_none() { if info.is_none() {
return None; return None;
@@ -137,9 +135,9 @@ impl AnyRef {
/// Panics if `AnyRef` is already borrowed or `AnyRef` is `Null`. /// Panics if `AnyRef` is already borrowed or `AnyRef` is `Null`.
pub fn set_host_info(&self, info: Option<Box<dyn HostInfo>>) { pub fn set_host_info(&self, info: Option<Box<dyn HostInfo>>) {
match self { match self {
AnyRef::Null => panic!("null"), Self::Null => panic!("null"),
AnyRef::Ref(r) => r.0.set_host_info(info), Self::Ref(r) => r.0.set_host_info(info),
AnyRef::Other(r) => { Self::Other(r) => {
r.0.borrow_mut().host_info = info; r.0.borrow_mut().host_info = info;
} }
} }
@@ -149,9 +147,9 @@ impl AnyRef {
impl fmt::Debug for AnyRef { impl fmt::Debug for AnyRef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
AnyRef::Null => write!(f, "null"), Self::Null => write!(f, "null"),
AnyRef::Ref(_) => write!(f, "anyref"), Self::Ref(_) => write!(f, "anyref"),
AnyRef::Other(_) => write!(f, "other ref"), Self::Other(_) => write!(f, "other ref"),
} }
} }
} }
@@ -175,14 +173,14 @@ pub struct HostRef<T>(Rc<RefCell<ContentBox<T>>>);
impl<T: 'static> HostRef<T> { impl<T: 'static> HostRef<T> {
/// Creates a new `HostRef<T>` from `T`. /// Creates a new `HostRef<T>` from `T`.
pub fn new(item: T) -> HostRef<T> { pub fn new(item: T) -> Self {
let anyref_data: Weak<HostRef<T>> = Weak::new(); let anyref_data: Weak<Self> = Weak::new();
let content = ContentBox { let content = ContentBox {
content: item, content: item,
host_info: None, host_info: None,
anyref_data, anyref_data,
}; };
HostRef(Rc::new(RefCell::new(content))) Self(Rc::new(RefCell::new(content)))
} }
/// Immutably borrows the wrapped data. /// Immutably borrows the wrapped data.
@@ -205,7 +203,7 @@ impl<T: 'static> HostRef<T> {
/// Returns true if the two `HostRef<T>`'s point to the same value (not just /// Returns true if the two `HostRef<T>`'s point to the same value (not just
/// values that compare as equal). /// values that compare as equal).
pub fn ptr_eq(&self, other: &HostRef<T>) -> bool { pub fn ptr_eq(&self, other: &Self) -> bool {
Rc::ptr_eq(&self.0, &other.0) Rc::ptr_eq(&self.0, &other.0)
} }
@@ -253,8 +251,8 @@ impl<T: 'static> InternalRefBase for HostRef<T> {
} }
impl<T> Clone for HostRef<T> { impl<T> Clone for HostRef<T> {
fn clone(&self) -> HostRef<T> { fn clone(&self) -> Self {
HostRef(self.0.clone()) Self(self.0.clone())
} }
} }

View File

@@ -34,7 +34,7 @@ impl Type {
/// `I64`, `F32`, `F64`, `V128`). /// `I64`, `F32`, `F64`, `V128`).
pub fn is_num(&self) -> bool { pub fn is_num(&self) -> bool {
match self { match self {
Type::I32 | Type::I64 | Type::F32 | Type::F64 | Type::V128 => true, Self::I32 | Self::I64 | Self::F32 | Self::F64 | Self::V128 => true,
_ => false, _ => false,
} }
} }
@@ -42,7 +42,7 @@ impl Type {
/// Returns true if `Type` matches either of the reference types. /// Returns true if `Type` matches either of the reference types.
pub fn is_ref(&self) -> bool { pub fn is_ref(&self) -> bool {
match self { match self {
Type::AnyRef | Type::FuncRef => true, Self::AnyRef | Self::FuncRef => true,
_ => false, _ => false,
} }
} }
@@ -171,7 +171,7 @@ macro_rules! accessors {
/// Attempt to return the underlying type of this external type, /// Attempt to return the underlying type of this external type,
/// returning `None` if it is a different type. /// returning `None` if it is a different type.
pub fn $get(&self) -> Option<&$ty> { pub fn $get(&self) -> Option<&$ty> {
if let ExternType::$variant(e) = self { if let Self::$variant(e) = self {
Some(e) Some(e)
} else { } else {
None None
@@ -200,10 +200,10 @@ impl ExternType {
/// Check if two externs are compatible /// Check if two externs are compatible
pub fn is_compatible_with(&self, other: &Self) -> bool { pub fn is_compatible_with(&self, other: &Self) -> bool {
match (self, other) { match (self, other) {
(ExternType::Function(a), ExternType::Function(b)) => a == b, (Self::Function(a), Self::Function(b)) => a == b,
(ExternType::Global(a), ExternType::Global(b)) => is_global_compatible(a, b), (Self::Global(a), Self::Global(b)) => is_global_compatible(a, b),
(ExternType::Table(a), ExternType::Table(b)) => is_table_compatible(a, b), (Self::Table(a), Self::Table(b)) => is_table_compatible(a, b),
(ExternType::Memory(a), ExternType::Memory(b)) => is_memory_compatible(a, b), (Self::Memory(a), Self::Memory(b)) => is_memory_compatible(a, b),
// The rest of possibilities, are not compatible // The rest of possibilities, are not compatible
_ => false, _ => false,
} }
@@ -288,16 +288,16 @@ pub enum Mutability {
} }
impl From<bool> for Mutability { impl From<bool> for Mutability {
fn from(val: bool) -> Mutability { fn from(val: bool) -> Self {
match val { match val {
false => Mutability::Const, false => Self::Const,
true => Mutability::Var, true => Self::Var,
} }
} }
} }
impl From<Mutability> for bool { impl From<Mutability> for bool {
fn from(val: Mutability) -> bool { fn from(val: Mutability) -> Self {
match val { match val {
Mutability::Const => false, Mutability::Const => false,
Mutability::Var => true, Mutability::Var => true,
@@ -374,20 +374,20 @@ impl GlobalInit {
/// Get the `GlobalInit` from a given `Value` /// Get the `GlobalInit` from a given `Value`
pub fn from_value<T>(value: Value<T>) -> Self { pub fn from_value<T>(value: Value<T>) -> Self {
match value { match value {
Value::I32(i) => GlobalInit::I32Const(i), Value::I32(i) => Self::I32Const(i),
Value::I64(i) => GlobalInit::I64Const(i), Value::I64(i) => Self::I64Const(i),
Value::F32(f) => GlobalInit::F32Const(f), Value::F32(f) => Self::F32Const(f),
Value::F64(f) => GlobalInit::F64Const(f), Value::F64(f) => Self::F64Const(f),
_ => unimplemented!("GlobalInit from_value for {:?}", value), _ => unimplemented!("GlobalInit from_value for {:?}", value),
} }
} }
/// Get the `Value` from the Global init value /// Get the `Value` from the Global init value
pub fn to_value<T>(&self) -> Value<T> { pub fn to_value<T>(&self) -> Value<T> {
match self { match self {
GlobalInit::I32Const(i) => Value::I32(*i), Self::I32Const(i) => Value::I32(*i),
GlobalInit::I64Const(i) => Value::I64(*i), Self::I64Const(i) => Value::I64(*i),
GlobalInit::F32Const(f) => Value::F32(*f), Self::F32Const(f) => Value::F32(*f),
GlobalInit::F64Const(f) => Value::F64(*f), Self::F64Const(f) => Value::F64(*f),
_ => unimplemented!("GlobalInit to_value for {:?}", self), _ => unimplemented!("GlobalInit to_value for {:?}", self),
} }
} }
@@ -414,8 +414,8 @@ pub struct TableType {
impl TableType { impl TableType {
/// Creates a new table descriptor which will contain the specified /// Creates a new table descriptor which will contain the specified
/// `element` and have the `limits` applied to its length. /// `element` and have the `limits` applied to its length.
pub fn new(ty: Type, minimum: u32, maximum: Option<u32>) -> TableType { pub fn new(ty: Type, minimum: u32, maximum: Option<u32>) -> Self {
TableType { Self {
ty, ty,
minimum, minimum,
maximum, maximum,
@@ -453,15 +453,11 @@ pub struct MemoryType {
impl MemoryType { impl MemoryType {
/// Creates a new descriptor for a WebAssembly memory given the specified /// Creates a new descriptor for a WebAssembly memory given the specified
/// limits of the memory. /// limits of the memory.
pub fn new<IntoPages>( pub fn new<IntoPages>(minimum: IntoPages, maximum: Option<IntoPages>, shared: bool) -> Self
minimum: IntoPages,
maximum: Option<IntoPages>,
shared: bool,
) -> MemoryType
where where
IntoPages: Into<Pages>, IntoPages: Into<Pages>,
{ {
MemoryType { Self {
minimum: minimum.into(), minimum: minimum.into(),
maximum: maximum.map(|m| m.into()), maximum: maximum.map(|m| m.into()),
shared, shared,

View File

@@ -69,59 +69,59 @@ impl fmt::Debug for Bytes {
} }
impl From<Pages> for Bytes { impl From<Pages> for Bytes {
fn from(pages: Pages) -> Bytes { fn from(pages: Pages) -> Self {
Bytes((pages.0 as usize) * WASM_PAGE_SIZE) Self((pages.0 as usize) * WASM_PAGE_SIZE)
} }
} }
impl From<usize> for Bytes { impl From<usize> for Bytes {
fn from(other: usize) -> Self { fn from(other: usize) -> Self {
Bytes(other) Self(other)
} }
} }
impl<T> Sub<T> for Pages impl<T> Sub<T> for Pages
where where
T: Into<Pages>, T: Into<Self>,
{ {
type Output = Pages; type Output = Self;
fn sub(self, rhs: T) -> Pages { fn sub(self, rhs: T) -> Self {
Pages(self.0 - rhs.into().0) Self(self.0 - rhs.into().0)
} }
} }
impl<T> Add<T> for Pages impl<T> Add<T> for Pages
where where
T: Into<Pages>, T: Into<Self>,
{ {
type Output = Pages; type Output = Self;
fn add(self, rhs: T) -> Pages { fn add(self, rhs: T) -> Self {
Pages(self.0 + rhs.into().0) Self(self.0 + rhs.into().0)
} }
} }
impl From<Bytes> for Pages { impl From<Bytes> for Pages {
fn from(bytes: Bytes) -> Pages { fn from(bytes: Bytes) -> Self {
Pages((bytes.0 / WASM_PAGE_SIZE) as u32) Self((bytes.0 / WASM_PAGE_SIZE) as u32)
} }
} }
impl<T> Sub<T> for Bytes impl<T> Sub<T> for Bytes
where where
T: Into<Bytes>, T: Into<Self>,
{ {
type Output = Bytes; type Output = Self;
fn sub(self, rhs: T) -> Bytes { fn sub(self, rhs: T) -> Self {
Bytes(self.0 - rhs.into().0) Self(self.0 - rhs.into().0)
} }
} }
impl<T> Add<T> for Bytes impl<T> Add<T> for Bytes
where where
T: Into<Bytes>, T: Into<Self>,
{ {
type Output = Bytes; type Output = Self;
fn add(self, rhs: T) -> Bytes { fn add(self, rhs: T) -> Self {
Bytes(self.0 + rhs.into().0) Self(self.0 + rhs.into().0)
} }
} }
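For reference, these conversions are all in units of the 64 KiB wasm page. A standalone mirror of the two `From` impls (note the bytes-to-pages direction truncates):

```rust
const WASM_PAGE_SIZE: usize = 65_536; // 64 KiB

#[derive(Debug, Clone, Copy, PartialEq)]
struct Pages(u32);
#[derive(Debug, Clone, Copy, PartialEq)]
struct Bytes(usize);

impl From<Pages> for Bytes {
    fn from(pages: Pages) -> Self { Self(pages.0 as usize * WASM_PAGE_SIZE) }
}
impl From<Bytes> for Pages {
    // Truncating: a partial page does not count as a whole page.
    fn from(bytes: Bytes) -> Self { Self((bytes.0 / WASM_PAGE_SIZE) as u32) }
}

fn main() {
    assert_eq!(Bytes::from(Pages(2)), Bytes(131_072));
    assert_eq!(Pages::from(Bytes(131_073)), Pages(2)); // rounds down
}
```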

View File

@@ -37,7 +37,7 @@ macro_rules! accessors {
/// Attempt to access the underlying value of this `Value`, returning /// Attempt to access the underlying value of this `Value`, returning
/// `None` if it is not the correct type. /// `None` if it is not the correct type.
pub fn $get(&self) -> Option<$ty> { pub fn $get(&self) -> Option<$ty> {
if let Value::$variant($bind) = self { if let Self::$variant($bind) = self {
Some($cvt) Some($cvt)
} else { } else {
None None
@@ -58,43 +58,53 @@ macro_rules! accessors {
impl<T> Value<T> { impl<T> Value<T> {
/// Returns a null `anyref` value. /// Returns a null `anyref` value.
pub fn null() -> Value<T> { pub fn null() -> Self {
Value::AnyRef(AnyRef::null()) Self::AnyRef(AnyRef::null())
} }
/// Returns the corresponding [`Type`] for this `Value`. /// Returns the corresponding [`Type`] for this `Value`.
pub fn ty(&self) -> Type { pub fn ty(&self) -> Type {
match self { match self {
Value::I32(_) => Type::I32, Self::I32(_) => Type::I32,
Value::I64(_) => Type::I64, Self::I64(_) => Type::I64,
Value::F32(_) => Type::F32, Self::F32(_) => Type::F32,
Value::F64(_) => Type::F64, Self::F64(_) => Type::F64,
Value::AnyRef(_) => Type::AnyRef, Self::AnyRef(_) => Type::AnyRef,
Value::FuncRef(_) => Type::FuncRef, Self::FuncRef(_) => Type::FuncRef,
Value::V128(_) => Type::V128, Self::V128(_) => Type::V128,
} }
} }
/// Writes its value to a given pointer /// Writes its value to a given pointer
///
/// # Safety
/// `p` must be:
/// - Sufficiently aligned for the Rust equivalent of the type in `self`
/// - Non-null and pointing to valid, mutable memory
pub unsafe fn write_value_to(&self, p: *mut i128) { pub unsafe fn write_value_to(&self, p: *mut i128) {
match self { match self {
Value::I32(i) => ptr::write(p as *mut i32, *i), Self::I32(i) => ptr::write(p as *mut i32, *i),
Value::I64(i) => ptr::write(p as *mut i64, *i), Self::I64(i) => ptr::write(p as *mut i64, *i),
Value::F32(u) => ptr::write(p as *mut f32, *u), Self::F32(u) => ptr::write(p as *mut f32, *u),
Value::F64(u) => ptr::write(p as *mut f64, *u), Self::F64(u) => ptr::write(p as *mut f64, *u),
Value::V128(b) => ptr::write(p as *mut u128, *b), Self::V128(b) => ptr::write(p as *mut u128, *b),
_ => unimplemented!("Value::write_value_to"), _ => unimplemented!("Value::write_value_to"),
} }
} }
/// Gets a `Value` given a pointer and a `Type` /// Gets a `Value` given a pointer and a `Type`
///
/// # Safety
/// `p` must be:
/// - Properly aligned to the specified `ty`'s Rust equivalent
/// - Non-null and pointing to valid memory
pub unsafe fn read_value_from(p: *const i128, ty: Type) -> Value<T> { pub unsafe fn read_value_from(p: *const i128, ty: Type) -> Value<T> {
match ty { match ty {
Type::I32 => Value::I32(ptr::read(p as *const i32)), Type::I32 => Self::I32(ptr::read(p as *const i32)),
Type::I64 => Value::I64(ptr::read(p as *const i64)), Type::I64 => Self::I64(ptr::read(p as *const i64)),
Type::F32 => Value::F32(ptr::read(p as *const f32)), Type::F32 => Self::F32(ptr::read(p as *const f32)),
Type::F64 => Value::F64(ptr::read(p as *const f64)), Type::F64 => Self::F64(ptr::read(p as *const f64)),
Type::V128 => Value::V128(ptr::read(p as *const u128)), Type::V128 => Self::V128(ptr::read(p as *const u128)),
_ => unimplemented!("Value::read_value_from"), _ => unimplemented!("Value::read_value_from"),
} }
} }
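The new `# Safety` sections spell out the contract: the `i128` slot must be non-null, writable, and sufficiently aligned for the value being written or read. A standalone sketch of a sound round trip through a stack slot, using a trimmed-down `Value`:

```rust
use std::ptr;

#[derive(Debug, PartialEq)]
enum Value { I32(i32) }

impl Value {
    /// Caller must uphold the contract above: `p` valid, writable, aligned.
    unsafe fn write_value_to(&self, p: *mut i128) {
        match self {
            Value::I32(i) => ptr::write(p as *mut i32, *i),
        }
    }
    unsafe fn read_value_from(p: *const i128) -> Value {
        Value::I32(ptr::read(p as *const i32))
    }
}

fn main() {
    let mut slot: i128 = 0; // a stack slot is valid, writable, and 16-aligned
    unsafe {
        Value::I32(-7).write_value_to(&mut slot);
        assert_eq!(Value::read_value_from(&slot), Value::I32(-7));
    }
}
```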
@@ -115,7 +125,7 @@ impl<T> Value<T> {
/// This will return `Some` for both the `AnyRef` and `FuncRef` types. /// This will return `Some` for both the `AnyRef` and `FuncRef` types.
pub fn anyref(&self) -> Option<AnyRef> { pub fn anyref(&self) -> Option<AnyRef> {
match self { match self {
Value::AnyRef(e) => Some(e.clone()), Self::AnyRef(e) => Some(e.clone()),
_ => None, _ => None,
} }
} }
@@ -134,13 +144,13 @@ impl<T> Value<T> {
impl<T> fmt::Debug for Value<T> { impl<T> fmt::Debug for Value<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Value::I32(v) => write!(f, "I32({:?})", v), Self::I32(v) => write!(f, "I32({:?})", v),
Value::I64(v) => write!(f, "I64({:?})", v), Self::I64(v) => write!(f, "I64({:?})", v),
Value::F32(v) => write!(f, "F32({:?})", v), Self::F32(v) => write!(f, "F32({:?})", v),
Value::F64(v) => write!(f, "F64({:?})", v), Self::F64(v) => write!(f, "F64({:?})", v),
Value::AnyRef(v) => write!(f, "AnyRef({:?})", v), Self::AnyRef(v) => write!(f, "AnyRef({:?})", v),
Value::FuncRef(_) => write!(f, "FuncRef"), Self::FuncRef(_) => write!(f, "FuncRef"),
Value::V128(v) => write!(f, "V128({:?})", v), Self::V128(v) => write!(f, "V128({:?})", v),
} }
} }
} }
@@ -148,49 +158,49 @@ impl<T> fmt::Debug for Value<T> {
impl<T> ToString for Value<T> { impl<T> ToString for Value<T> {
fn to_string(&self) -> String { fn to_string(&self) -> String {
match self { match self {
Value::I32(v) => format!("{}", v), Self::I32(v) => v.to_string(),
Value::I64(v) => format!("{}", v), Self::I64(v) => v.to_string(),
Value::F32(v) => format!("{}", v), Self::F32(v) => v.to_string(),
Value::F64(v) => format!("{}", v), Self::F64(v) => v.to_string(),
Value::AnyRef(_) => format!("anyref"), Self::AnyRef(_) => "anyref".to_string(),
Value::FuncRef(_) => format!("funcref"), Self::FuncRef(_) => "funcref".to_string(),
Value::V128(v) => format!("{}", v), Self::V128(v) => v.to_string(),
} }
} }
} }
impl<T> From<i32> for Value<T> { impl<T> From<i32> for Value<T> {
fn from(val: i32) -> Value<T> { fn from(val: i32) -> Self {
Value::I32(val) Self::I32(val)
} }
} }
impl<T> From<i64> for Value<T> { impl<T> From<i64> for Value<T> {
fn from(val: i64) -> Value<T> { fn from(val: i64) -> Self {
Value::I64(val) Self::I64(val)
} }
} }
impl<T> From<f32> for Value<T> { impl<T> From<f32> for Value<T> {
fn from(val: f32) -> Value<T> { fn from(val: f32) -> Self {
Value::F32(val) Self::F32(val)
} }
} }
impl<T> From<f64> for Value<T> { impl<T> From<f64> for Value<T> {
fn from(val: f64) -> Value<T> { fn from(val: f64) -> Self {
Value::F64(val) Self::F64(val)
} }
} }
impl<T> From<AnyRef> for Value<T> { impl<T> From<AnyRef> for Value<T> {
fn from(val: AnyRef) -> Value<T> { fn from(val: AnyRef) -> Self {
Value::AnyRef(val) Self::AnyRef(val)
} }
} }
// impl<T> From<T> for Value<T> { // impl<T> From<T> for Value<T> {
// fn from(val: T) -> Value<T> { // fn from(val: T) -> Self {
// Value::FuncRef(val) // Self::FuncRef(val)
// } // }
// } // }

Some files were not shown because too many files have changed in this diff.