mirror of https://github.com/mii443/wasmer.git, synced 2025-12-08 21:58:20 +00:00
In engine-jit, load code for each artifact contiguously.
@@ -155,15 +155,15 @@ impl JITArtifact {
             finished_functions,
             _finished_function_call_trampolines,
             finished_dynamic_function_trampolines,
+            custom_sections,
         ) = inner_jit.allocate(
             &mut unwind_registry,
             &serializable.compile_info.module,
             &serializable.compilation.function_bodies,
             &serializable.compilation.function_call_trampolines,
             &serializable.compilation.dynamic_function_trampolines,
+            &serializable.compilation.custom_sections,
         )?;
-        let custom_sections =
-            inner_jit.allocate_custom_sections(&serializable.compilation.custom_sections)?;

         link_module(
             &serializable.compile_info.module,
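
The hunk above collapses what used to be two engine calls (`allocate` plus a separate `allocate_custom_sections`) into one, so every pointer table comes out of the same contiguous block. Below is a hedged, self-contained sketch of that call shape, with plain byte slices and offset ranges standing in for wasmer's types; every name in it is illustrative, not wasmer's real API.

    use std::ops::Range;

    // One arena, one call, one table of locations per input category --
    // instead of a separate allocation pass for custom sections.
    fn allocate_all(
        arena: &mut Vec<u8>,
        functions: &[&[u8]],
        custom_sections: &[&[u8]],
    ) -> (Vec<Range<usize>>, Vec<Range<usize>>) {
        fn place(arena: &mut Vec<u8>, items: &[&[u8]]) -> Vec<Range<usize>> {
            items
                .iter()
                .map(|src| {
                    let start = arena.len();
                    arena.extend_from_slice(src);
                    start..arena.len()
                })
                .collect()
        }
        let funcs = place(arena, functions);
        let sections = place(arena, custom_sections);
        (funcs, sections)
    }

    fn main() {
        let mut arena = Vec::new();
        let (funcs, sections) =
            allocate_all(&mut arena, &[b"\x90\x90".as_slice(), b"\xc3".as_slice()], &[b"data".as_slice()]);
        assert_eq!((funcs.len(), sections.len()), (2, 1));
        assert_eq!(&arena[sections[0].clone()], b"data");
    }
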
@@ -194,7 +194,7 @@ impl JITArtifact {
                     .len();
                 let eh_frame_section_pointer = custom_sections[debug.eh_frame];
                 Some(unsafe {
-                    std::slice::from_raw_parts(eh_frame_section_pointer, eh_frame_section_size)
+                    std::slice::from_raw_parts(*eh_frame_section_pointer, eh_frame_section_size)
                 })
             }
             None => None,
@@ -3,12 +3,9 @@

 //! Memory management for executable code.
 use crate::unwind::UnwindRegistry;
-use std::mem::ManuallyDrop;
 use std::sync::Arc;
-use std::{cmp, mem};
-use wasmer_compiler::{CompiledFunctionUnwindInfo, FunctionBody, SectionBody};
-use wasmer_types::entity::{EntityRef, PrimaryMap};
-use wasmer_vm::{FunctionBodyPtr, Mmap, VMFunctionBody};
+use wasmer_compiler::{CompiledFunctionUnwindInfo, CustomSection, FunctionBody};
+use wasmer_vm::{Mmap, VMFunctionBody};

 /// The optimal alignment for functions.
 ///
@@ -17,193 +14,145 @@ use wasmer_vm::{FunctionBodyPtr, Mmap, VMFunctionBody};
|
|||||||
/// optimal alignment values.
|
/// optimal alignment values.
|
||||||
const ARCH_FUNCTION_ALIGNMENT: usize = 16;
|
const ARCH_FUNCTION_ALIGNMENT: usize = 16;
|
||||||
|
|
||||||
struct CodeMemoryEntry {
|
/// The optimal alignment for data.
|
||||||
mmap: ManuallyDrop<Mmap>,
|
///
|
||||||
}
|
const DATA_SECTION_ALIGNMENT: usize = 64;
|
||||||
|
|
||||||
impl CodeMemoryEntry {
|
|
||||||
fn new() -> Self {
|
|
||||||
let mmap = ManuallyDrop::new(Mmap::new());
|
|
||||||
Self { mmap }
|
|
||||||
}
|
|
||||||
fn with_capacity(cap: usize) -> Result<Self, String> {
|
|
||||||
let mmap = ManuallyDrop::new(Mmap::with_at_least(cap)?);
|
|
||||||
Ok(Self { mmap })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for CodeMemoryEntry {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
unsafe {
|
|
||||||
ManuallyDrop::drop(&mut self.mmap);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Memory manager for executable code.
|
/// Memory manager for executable code.
|
||||||
pub struct CodeMemory {
|
pub struct CodeMemory {
|
||||||
current: CodeMemoryEntry,
|
|
||||||
entries: Vec<CodeMemoryEntry>,
|
|
||||||
unwind_registries: Vec<Arc<UnwindRegistry>>,
|
unwind_registries: Vec<Arc<UnwindRegistry>>,
|
||||||
read_sections: Vec<Vec<u8>>,
|
mmap: Mmap,
|
||||||
position: usize,
|
start_of_nonexecutable_pages: usize,
|
||||||
published: usize,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CodeMemory {
|
impl CodeMemory {
|
||||||
/// Create a new `CodeMemory` instance.
|
/// Create a new `CodeMemory` instance.
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
current: CodeMemoryEntry::new(),
|
|
||||||
entries: Vec::new(),
|
|
||||||
read_sections: Vec::new(),
|
|
||||||
unwind_registries: Vec::new(),
|
unwind_registries: Vec::new(),
|
||||||
position: 0,
|
mmap: Mmap::new(),
|
||||||
published: 0,
|
start_of_nonexecutable_pages: 0,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Allocate a single contiguous block of memory for the functions and custom sections, and copy the data in place.
|
||||||
|
pub fn allocate(
|
||||||
|
&mut self,
|
||||||
|
registry: &mut UnwindRegistry,
|
||||||
|
functions: &[&FunctionBody],
|
||||||
|
executable_sections: &[&CustomSection],
|
||||||
|
data_sections: &[&CustomSection],
|
||||||
|
) -> Result<(Vec<&mut [VMFunctionBody]>, Vec<&mut [u8]>, Vec<&mut [u8]>), String> {
|
||||||
|
let mut function_result = vec![];
|
||||||
|
let mut data_section_result = vec![];
|
||||||
|
let mut executable_section_result = vec![];
|
||||||
|
|
||||||
|
let page_size = region::page::size();
|
||||||
|
|
||||||
|
// 1. Calculate the total size, that is:
|
||||||
|
// - function body size, including all trampolines
|
||||||
|
// -- windows unwind info
|
||||||
|
// -- padding between functions
|
||||||
|
// - executable section body
|
||||||
|
// -- padding between executable sections
|
||||||
|
// - padding until a new page to change page permissions
|
||||||
|
// - data section body size
|
||||||
|
// -- padding between data sections
|
||||||
|
|
||||||
|
let total_len = round_up(
|
||||||
|
functions.iter().fold(0, |acc, func| {
|
||||||
|
round_up(
|
||||||
|
acc + Self::function_allocation_size(func),
|
||||||
|
ARCH_FUNCTION_ALIGNMENT,
|
||||||
|
)
|
||||||
|
}) + executable_sections.iter().fold(0, |acc, exec| {
|
||||||
|
round_up(acc + exec.bytes.len(), ARCH_FUNCTION_ALIGNMENT)
|
||||||
|
}),
|
||||||
|
page_size,
|
||||||
|
) + data_sections.iter().fold(0, |acc, data| {
|
||||||
|
round_up(acc + data.bytes.len(), DATA_SECTION_ALIGNMENT)
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2. Allocate the pages. Mark them all read-write.
|
||||||
|
|
||||||
|
self.mmap = Mmap::with_at_least(total_len)?;
|
||||||
|
|
||||||
|
// 3. Determine where the pointers to each function, executable section
|
||||||
|
// or data section are. Copy the functions. Collect the addresses of each and return them.
|
||||||
|
|
||||||
|
let mut bytes = 0;
|
||||||
|
let mut buf = self.mmap.as_mut_slice();
|
||||||
|
for func in functions {
|
||||||
|
let len = round_up(
|
||||||
|
Self::function_allocation_size(func),
|
||||||
|
ARCH_FUNCTION_ALIGNMENT,
|
||||||
|
);
|
||||||
|
let (func_buf, next_buf) = buf.split_at_mut(len);
|
||||||
|
buf = next_buf;
|
||||||
|
bytes += len;
|
||||||
|
|
||||||
|
let vmfunc = Self::copy_function(registry, func, func_buf);
|
||||||
|
assert!(vmfunc as *mut _ as *mut u8 as usize % ARCH_FUNCTION_ALIGNMENT == 0);
|
||||||
|
function_result.push(vmfunc);
|
||||||
|
}
|
||||||
|
for section in executable_sections {
|
||||||
|
let section = §ion.bytes;
|
||||||
|
assert!(buf.as_mut_ptr() as *mut _ as *mut u8 as usize % ARCH_FUNCTION_ALIGNMENT == 0);
|
||||||
|
let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
|
||||||
|
let (s, next_buf) = buf.split_at_mut(len);
|
||||||
|
buf = next_buf;
|
||||||
|
bytes += len;
|
||||||
|
s[..section.len()].copy_from_slice(section.as_slice());
|
||||||
|
executable_section_result.push(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.start_of_nonexecutable_pages = bytes;
|
||||||
|
|
||||||
|
if !data_sections.is_empty() {
|
||||||
|
// Data sections have different page permissions from the executable
|
||||||
|
// code that came before it, so they need to be on different pages.
|
||||||
|
let padding = round_up(bytes, page_size) - bytes;
|
||||||
|
buf = buf.split_at_mut(padding).1;
|
||||||
|
|
||||||
|
for section in data_sections {
|
||||||
|
let section = §ion.bytes;
|
||||||
|
assert!(
|
||||||
|
buf.as_mut_ptr() as *mut _ as *mut u8 as usize % DATA_SECTION_ALIGNMENT == 0
|
||||||
|
);
|
||||||
|
let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
|
||||||
|
let (s, next_buf) = buf.split_at_mut(len);
|
||||||
|
buf = next_buf;
|
||||||
|
s[..section.len()].copy_from_slice(section.as_slice());
|
||||||
|
data_section_result.push(s);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
function_result,
|
||||||
|
executable_section_result,
|
||||||
|
data_section_result,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
/// Publish the unwind registry into code memory.
|
/// Publish the unwind registry into code memory.
|
||||||
pub(crate) fn publish_unwind_registry(&mut self, unwind_registry: Arc<UnwindRegistry>) {
|
pub(crate) fn publish_unwind_registry(&mut self, unwind_registry: Arc<UnwindRegistry>) {
|
||||||
self.unwind_registries.push(unwind_registry);
|
self.unwind_registries.push(unwind_registry);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Allocate a continuous memory block for a compilation.
|
/// Apply the page permissions.
|
||||||
///
|
|
||||||
/// Allocates memory for both the function bodies as well as function unwind data.
|
|
||||||
pub fn allocate_functions<K>(
|
|
||||||
&mut self,
|
|
||||||
registry: &mut UnwindRegistry,
|
|
||||||
compilation: &PrimaryMap<K, FunctionBody>,
|
|
||||||
) -> Result<PrimaryMap<K, FunctionBodyPtr>, String>
|
|
||||||
where
|
|
||||||
K: EntityRef,
|
|
||||||
{
|
|
||||||
let total_len = compilation.values().fold(0, |acc, func| {
|
|
||||||
acc + get_align_padding_size(acc, ARCH_FUNCTION_ALIGNMENT)
|
|
||||||
+ Self::function_allocation_size(func)
|
|
||||||
});
|
|
||||||
|
|
||||||
let (mut buf, start) = self.allocate(total_len, ARCH_FUNCTION_ALIGNMENT)?;
|
|
||||||
let base_address = buf.as_ptr() as usize - start;
|
|
||||||
let mut result = PrimaryMap::with_capacity(compilation.len());
|
|
||||||
let mut start = start as u32;
|
|
||||||
let mut padding = 0usize;
|
|
||||||
for func in compilation.values() {
|
|
||||||
let (next_start, next_buf, vmfunc) = Self::copy_function(
|
|
||||||
registry,
|
|
||||||
base_address,
|
|
||||||
func,
|
|
||||||
start + padding as u32,
|
|
||||||
&mut buf[padding..],
|
|
||||||
);
|
|
||||||
assert!(vmfunc as *mut _ as *mut u8 as usize % ARCH_FUNCTION_ALIGNMENT == 0);
|
|
||||||
|
|
||||||
result.push(FunctionBodyPtr(vmfunc as *mut [VMFunctionBody]));
|
|
||||||
|
|
||||||
padding = get_align_padding_size(next_start as usize, ARCH_FUNCTION_ALIGNMENT);
|
|
||||||
start = next_start;
|
|
||||||
buf = next_buf;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Allocate a continuous memory block for a single compiled function.
|
|
||||||
/// TODO: Reorganize the code that calls this to emit code directly into the
|
|
||||||
/// mmap region rather than into a Vec that we need to copy in.
|
|
||||||
pub fn allocate_for_function(
|
|
||||||
&mut self,
|
|
||||||
registry: &mut UnwindRegistry,
|
|
||||||
func: &FunctionBody,
|
|
||||||
) -> Result<&mut [VMFunctionBody], String> {
|
|
||||||
let size = Self::function_allocation_size(func);
|
|
||||||
|
|
||||||
let (buf, start) = self.allocate(size, ARCH_FUNCTION_ALIGNMENT)?;
|
|
||||||
let base_address = buf.as_ptr() as usize - start;
|
|
||||||
|
|
||||||
let (_, _, vmfunc) = Self::copy_function(registry, base_address, func, start as u32, buf);
|
|
||||||
assert!(vmfunc as *mut _ as *mut u8 as usize % ARCH_FUNCTION_ALIGNMENT == 0);
|
|
||||||
|
|
||||||
Ok(vmfunc)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Allocate a continuous memory block for an executable custom section.
|
|
||||||
pub fn allocate_for_executable_custom_section(
|
|
||||||
&mut self,
|
|
||||||
section: &SectionBody,
|
|
||||||
) -> Result<&mut [u8], String> {
|
|
||||||
let section = section.as_slice();
|
|
||||||
let (buf, _) = self.allocate(section.len(), ARCH_FUNCTION_ALIGNMENT)?;
|
|
||||||
buf.copy_from_slice(section);
|
|
||||||
Ok(buf)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Allocate a continuous memory block for a readable custom section.
|
|
||||||
pub fn allocate_for_custom_section(
|
|
||||||
&mut self,
|
|
||||||
section: &SectionBody,
|
|
||||||
) -> Result<&mut [u8], String> {
|
|
||||||
let section = section.as_slice().to_vec();
|
|
||||||
self.read_sections.push(section);
|
|
||||||
Ok(self
|
|
||||||
.read_sections
|
|
||||||
.last_mut()
|
|
||||||
.ok_or_else(|| "Can't get last section".to_string())?)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Make all allocated memory executable.
|
|
||||||
pub fn publish(&mut self) {
|
pub fn publish(&mut self) {
|
||||||
self.push_current(0)
|
if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
|
||||||
.expect("failed to push current memory map");
|
return;
|
||||||
|
|
||||||
for CodeMemoryEntry { mmap: m } in &mut self.entries[self.published..] {
|
|
||||||
if !m.is_empty() {
|
|
||||||
unsafe {
|
|
||||||
region::protect(m.as_mut_ptr(), m.len(), region::Protection::READ_EXECUTE)
|
|
||||||
}
|
|
||||||
.expect("unable to make memory readonly and executable");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
|
||||||
self.published = self.entries.len();
|
unsafe {
|
||||||
}
|
region::protect(
|
||||||
|
self.mmap.as_mut_ptr(),
|
||||||
/// Allocate `size` bytes of memory which can be made executable later by
|
self.start_of_nonexecutable_pages,
|
||||||
/// calling `publish()`. Note that we allocate the memory as writeable so
|
region::Protection::READ_EXECUTE,
|
||||||
/// that it can be written to and patched, though we make it readonly before
|
)
|
||||||
/// actually executing from it.
|
|
||||||
///
|
|
||||||
/// A few values are returned:
|
|
||||||
///
|
|
||||||
/// * A mutable slice which references the allocated memory
|
|
||||||
/// * A function table instance where unwind information is registered
|
|
||||||
/// * The offset within the current mmap that the slice starts at
|
|
||||||
fn allocate(&mut self, size: usize, alignment: usize) -> Result<(&mut [u8], usize), String> {
|
|
||||||
assert!(alignment > 0);
|
|
||||||
|
|
||||||
let align_padding = get_align_padding_size(self.position, alignment);
|
|
||||||
let padded_size = size + align_padding;
|
|
||||||
|
|
||||||
let old_position;
|
|
||||||
|
|
||||||
if self.current.mmap.len() - self.position < padded_size {
|
|
||||||
// If we are allocating a new region, then it is already aligned to page boundary - no need to apply padding here.
|
|
||||||
self.push_current(cmp::max(0x10000, size))?;
|
|
||||||
old_position = 0;
|
|
||||||
self.position += size;
|
|
||||||
} else {
|
|
||||||
// Otherwise, apply padding.
|
|
||||||
old_position = self.position + align_padding;
|
|
||||||
self.position += padded_size;
|
|
||||||
}
|
}
|
||||||
|
.expect("unable to make memory readonly and executable");
|
||||||
assert!(old_position % alignment == 0);
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
&mut self.current.mmap.as_mut_slice()[old_position..self.position],
|
|
||||||
old_position,
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Calculates the allocation size of the given compiled function.
|
/// Calculates the allocation size of the given compiled function.
|
||||||
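
To make step 1 of the new `allocate` concrete, here is a hedged, self-contained rework of its size arithmetic. The body sizes and the 4 KiB page size are invented for the example; the real code asks `region::page::size()` and `function_allocation_size`. `round_up` is the helper this commit adds at the bottom of the file (see the hunk further down, with a property check after it).

    fn round_up(size: usize, multiple: usize) -> usize {
        debug_assert!(multiple.is_power_of_two());
        (size + (multiple - 1)) & !(multiple - 1)
    }

    fn main() {
        const ARCH_FUNCTION_ALIGNMENT: usize = 16;
        const DATA_SECTION_ALIGNMENT: usize = 64;
        let page_size = 4096; // made up; the real code queries the OS

        let functions = [100usize, 37]; // pretend function allocation sizes
        let exec_sections = [200usize];
        let data_sections = [10usize, 300];

        // Step 1a: functions and executable sections, each padded to the
        // function alignment as the fold proceeds.
        let exec_len = functions
            .iter()
            .fold(0, |acc, len| round_up(acc + len, ARCH_FUNCTION_ALIGNMENT))
            + exec_sections
                .iter()
                .fold(0, |acc, len| round_up(acc + len, ARCH_FUNCTION_ALIGNMENT));
        assert_eq!(exec_len, 160 + 208);

        // Step 1b: pad the executable part out to a whole page, because the
        // read-write data that follows gets different page permissions, then
        // append the data sections at their own alignment.
        let total_len = round_up(exec_len, page_size)
            + data_sections
                .iter()
                .fold(0, |acc, len| round_up(acc + len, DATA_SECTION_ALIGNMENT));
        assert_eq!(total_len, 4096 + 384);
        println!("one mmap of {} bytes", total_len);
    }
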
@@ -224,41 +173,35 @@ impl CodeMemory {
     /// This will also add the function to the current function table.
     fn copy_function<'a>(
         registry: &mut UnwindRegistry,
-        base_address: usize,
         func: &FunctionBody,
-        func_start: u32,
         buf: &'a mut [u8],
-    ) -> (u32, &'a mut [u8], &'a mut [VMFunctionBody]) {
-        assert!((func_start as usize) % ARCH_FUNCTION_ALIGNMENT == 0);
+    ) -> &'a mut [VMFunctionBody] {
+        assert!((buf.as_ptr() as usize) % ARCH_FUNCTION_ALIGNMENT == 0);

         let func_len = func.body.len();
-        let mut func_end = func_start + (func_len as u32);

-        let (body, mut remainder) = buf.split_at_mut(func_len);
+        let (body, remainder) = buf.split_at_mut(func_len);
         body.copy_from_slice(&func.body);
         let vmfunc = Self::view_as_mut_vmfunc_slice(body);

         if let Some(CompiledFunctionUnwindInfo::WindowsX64(info)) = &func.unwind_info {
             // Windows unwind information is written following the function body
             // Keep unwind information 32-bit aligned (round up to the nearest 4 byte boundary)
-            let unwind_start = (func_end + 3) & !3;
+            let unwind_start = (func_len + 3) & !3;
             let unwind_size = info.len();
-            let padding = (unwind_start - func_end) as usize;
-            assert_eq!((func_start as usize + func_len + padding) % 4, 0);
-            let (slice, r) = remainder.split_at_mut(padding + unwind_size);
+            let padding = unwind_start - func_len;
+            assert_eq!((func_len + padding) % 4, 0);
+            let slice = remainder.split_at_mut(padding + unwind_size).0;
             slice[padding..].copy_from_slice(&info);
-            // println!("Info {:?} (func_len: {}, padded: {})", info, func_len, padding);
-            func_end = unwind_start + (unwind_size as u32);
-            remainder = r;
         }

         if let Some(info) = &func.unwind_info {
             registry
-                .register(base_address, func_start, func_len as u32, info)
+                .register(vmfunc.as_ptr() as usize, 0, func_len as u32, info)
                 .expect("failed to register unwind information");
         }

-        (func_end, remainder, vmfunc)
+        vmfunc
     }

     /// Convert mut a slice from u8 to VMFunctionBody.
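
Two things changed in `copy_function` above: the caller now hands it a buffer that already starts at an aligned address inside the single mmap, so the `base_address`/`func_start` bookkeeping disappears and unwind info is registered at the function's absolute address with a zero offset; and the Windows unwind-info placement is computed from `func_len` alone. A quick sanity check of that 4-byte rounding, using nothing but plain integer arithmetic:

    fn main() {
        // `(func_len + 3) & !3` rounds up to the next multiple of 4, so the
        // Windows x64 unwind info always starts 32-bit aligned after the body.
        for func_len in [12usize, 13, 14, 15, 16] {
            let unwind_start = (func_len + 3) & !3;
            let padding = unwind_start - func_len;
            assert_eq!((func_len + padding) % 4, 0);
            println!("func_len {func_len:2} -> unwind info at offset {unwind_start:2}");
        }
    }
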
@@ -267,34 +210,11 @@ impl CodeMemory {
         let body_ptr = byte_ptr as *mut [VMFunctionBody];
         unsafe { &mut *body_ptr }
     }
-
-    /// Pushes the current Mmap and allocates a new Mmap of the given size.
-    fn push_current(&mut self, new_size: usize) -> Result<(), String> {
-        let previous = mem::replace(
-            &mut self.current,
-            if new_size == 0 {
-                CodeMemoryEntry::new()
-            } else {
-                CodeMemoryEntry::with_capacity(cmp::max(0x10000, new_size))?
-            },
-        );
-
-        if !previous.mmap.is_empty() {
-            self.entries.push(previous);
-        }
-
-        self.position = 0;
-
-        Ok(())
-    }
 }

-/// Calculates the minimum number of padding bytes required to fulfill `alignment`.
-fn get_align_padding_size(position: usize, alignment: usize) -> usize {
-    match position % alignment {
-        0 => 0,
-        x => alignment - x,
-    }
+fn round_up(size: usize, multiple: usize) -> usize {
+    debug_assert!(multiple.is_power_of_two());
+    (size + (multiple - 1)) & !(multiple - 1)
 }

 #[cfg(test)]
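
The new `round_up` replaces `get_align_padding_size`: instead of computing padding to insert, callers round running offsets up directly. The bit trick requires a power-of-two `multiple` (hence the `debug_assert!`), since `!(multiple - 1)` is only a low-bit mask in that case. A small self-contained property check against ceiling division:

    fn round_up(size: usize, multiple: usize) -> usize {
        debug_assert!(multiple.is_power_of_two());
        (size + (multiple - 1)) & !(multiple - 1)
    }

    fn main() {
        for m in [1usize, 2, 16, 64, 4096] {
            for size in 0..=10_000 {
                // Same answer as rounding up via ceiling division.
                assert_eq!(round_up(size, m), size.div_ceil(m) * m);
            }
        }
        println!("round_up agrees with ceiling division for power-of-two multiples");
    }
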
@@ -14,8 +14,8 @@ use wasmer_types::entity::PrimaryMap;
 use wasmer_types::Features;
 use wasmer_types::{FunctionIndex, FunctionType, LocalFunctionIndex, SignatureIndex};
 use wasmer_vm::{
-    FunctionBodyPtr, ModuleInfo, SignatureRegistry, VMFunctionBody, VMSharedSignatureIndex,
-    VMTrampoline,
+    FunctionBodyPtr, ModuleInfo, SectionBodyPtr, SignatureRegistry, VMFunctionBody,
+    VMSharedSignatureIndex, VMTrampoline,
 };

 /// A WebAssembly `JIT` Engine.
@@ -35,7 +35,7 @@ impl JITEngine {
             inner: Arc::new(Mutex::new(JITEngineInner {
                 compiler: Some(compiler),
                 function_call_trampolines: HashMap::new(),
-                code_memory: CodeMemory::new(),
+                code_memory: vec![],
                 signatures: SignatureRegistry::new(),
                 features,
             })),
@@ -63,7 +63,7 @@ impl JITEngine {
                 #[cfg(feature = "compiler")]
                 compiler: None,
                 function_call_trampolines: HashMap::new(),
-                code_memory: CodeMemory::new(),
+                code_memory: vec![],
                 signatures: SignatureRegistry::new(),
                 features: Features::default(),
             })),
@@ -157,7 +157,7 @@ pub struct JITEngineInner {
     features: Features,
     /// The code memory is responsible of publishing the compiled
     /// functions to memory.
-    code_memory: CodeMemory,
+    code_memory: Vec<CodeMemory>,
     /// The signature registry is used mainly to operate with trampolines
     /// performantly.
     signatures: SignatureRegistry,
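
With one `CodeMemory` per artifact, each module's code lives in (and can be dropped with) its own contiguous block instead of being interleaved with other modules in a shared allocator, which is the point of the commit. A minimal sketch of the push-then-`last_mut` pattern the engine now uses; the types here are illustrative stand-ins, not wasmer's:

    struct CodeBlock {
        bytes: Vec<u8>,
        executable: bool,
    }

    struct Engine {
        code_memory: Vec<CodeBlock>,
    }

    impl Engine {
        // Each artifact pushes a fresh block; all later steps (copying code,
        // publishing) address `last_mut()`, i.e. the block just created.
        fn allocate_artifact(&mut self, code: &[u8]) -> usize {
            self.code_memory.push(CodeBlock {
                bytes: code.to_vec(),
                executable: false,
            });
            self.code_memory.len() - 1
        }

        fn publish_compiled_code(&mut self) {
            // Stand-in for flipping page permissions to read+execute.
            self.code_memory.last_mut().unwrap().executable = true;
        }
    }

    fn main() {
        let mut engine = Engine { code_memory: vec![] };
        let idx = engine.allocate_artifact(b"\xc3");
        engine.publish_compiled_code();
        assert!(engine.code_memory[idx].executable);
    }
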
@@ -193,38 +193,6 @@ impl JITEngineInner {
|
|||||||
&self.features
|
&self.features
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Allocate custom sections into memory
|
|
||||||
pub(crate) fn allocate_custom_sections(
|
|
||||||
&mut self,
|
|
||||||
custom_sections: &PrimaryMap<SectionIndex, CustomSection>,
|
|
||||||
) -> Result<PrimaryMap<SectionIndex, *const u8>, CompileError> {
|
|
||||||
let mut result = PrimaryMap::with_capacity(custom_sections.len());
|
|
||||||
for (_, section) in custom_sections.iter() {
|
|
||||||
let buffer: &[u8] = match section.protection {
|
|
||||||
CustomSectionProtection::Read => self
|
|
||||||
.code_memory
|
|
||||||
.allocate_for_custom_section(§ion.bytes)
|
|
||||||
.map_err(|message| {
|
|
||||||
CompileError::Resource(format!(
|
|
||||||
"failed to allocate readable memory for custom section: {}",
|
|
||||||
message
|
|
||||||
))
|
|
||||||
})?,
|
|
||||||
CustomSectionProtection::ReadExecute => self
|
|
||||||
.code_memory
|
|
||||||
.allocate_for_executable_custom_section(§ion.bytes)
|
|
||||||
.map_err(|message| {
|
|
||||||
CompileError::Resource(format!(
|
|
||||||
"failed to allocate executable memory for custom section: {}",
|
|
||||||
message
|
|
||||||
))
|
|
||||||
})?,
|
|
||||||
};
|
|
||||||
result.push(buffer.as_ptr());
|
|
||||||
}
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Allocate compiled functions into memory
|
/// Allocate compiled functions into memory
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
pub(crate) fn allocate(
|
pub(crate) fn allocate(
|
||||||
@@ -234,89 +202,105 @@ impl JITEngineInner {
         functions: &PrimaryMap<LocalFunctionIndex, FunctionBody>,
         function_call_trampolines: &PrimaryMap<SignatureIndex, FunctionBody>,
         dynamic_function_trampolines: &PrimaryMap<FunctionIndex, FunctionBody>,
+        custom_sections: &PrimaryMap<SectionIndex, CustomSection>,
     ) -> Result<
         (
             PrimaryMap<LocalFunctionIndex, FunctionBodyPtr>,
             PrimaryMap<SignatureIndex, FunctionBodyPtr>,
             PrimaryMap<FunctionIndex, FunctionBodyPtr>,
+            PrimaryMap<SectionIndex, SectionBodyPtr>,
         ),
         CompileError,
     > {
-        // Allocate all of the compiled functions into executable memory,
-        // copying over their contents.
-        let allocated_functions = self
-            .code_memory
-            .allocate_functions(registry, &functions)
-            .map_err(|message| {
-                CompileError::Resource(format!(
-                    "failed to allocate memory for functions: {}",
-                    message
-                ))
-            })?;
-
-        let mut allocated_function_call_trampolines: PrimaryMap<SignatureIndex, FunctionBodyPtr> =
-            PrimaryMap::new();
-        // let (indices, compiled_functions): (Vec<VMSharedSignatureIndex>, PrimaryMap<FunctionIndex, FunctionBody>) = function_call_trampolines.iter().map(|(sig_index, compiled_function)| {
-        //     let func_type = module.signatures.get(sig_index).unwrap();
-        //     let index = self.signatures.register(&func_type);
-        //     (index, compiled_function)
-        // }).filter(|(index, _)| {
-        //     !self.function_call_trampolines.contains_key(index)
-        // }).unzip();
-        for (sig_index, compiled_function) in function_call_trampolines.iter() {
-            let func_type = module.signatures.get(sig_index).unwrap();
-            let index = self.signatures.register(&func_type);
-            // if self.function_call_trampolines.contains_key(&index) {
-            //     // We don't need to allocate the trampoline in case
-            //     // it's signature is already allocated.
-            //     continue;
-            // }
-            let ptr = self
-                .code_memory
-                .allocate_for_function(registry, &compiled_function)
-                .map_err(|message| {
-                    CompileError::Resource(format!(
-                        "failed to allocate memory for function call trampolines: {}",
-                        message
-                    ))
-                })?;
-            allocated_function_call_trampolines.push(FunctionBodyPtr(ptr));
-            let trampoline =
-                unsafe { std::mem::transmute::<*const VMFunctionBody, VMTrampoline>(ptr.as_ptr()) };
+        let function_bodies = functions
+            .values()
+            .chain(function_call_trampolines.values())
+            .chain(dynamic_function_trampolines.values())
+            .collect::<Vec<_>>();
+        let (executable_sections, data_sections): (Vec<_>, _) = custom_sections
+            .values()
+            .partition(|section| section.protection == CustomSectionProtection::ReadExecute);
+        self.code_memory.push(CodeMemory::new());
+
+        let (mut allocated_functions, allocated_executable_sections, allocated_data_sections) =
+            self.code_memory
+                .last_mut()
+                .unwrap()
+                .allocate(
+                    registry,
+                    function_bodies.as_slice(),
+                    executable_sections.as_slice(),
+                    data_sections.as_slice(),
+                )
+                .map_err(|message| {
+                    CompileError::Resource(format!(
+                        "failed to allocate memory for functions: {}",
+                        message
+                    ))
+                })?;
+
+        let allocated_functions_result = allocated_functions
+            .drain(0..functions.len())
+            .map(|slice| FunctionBodyPtr(slice as *mut [_]))
+            .collect::<PrimaryMap<LocalFunctionIndex, _>>();
+
+        let mut allocated_function_call_trampolines: PrimaryMap<SignatureIndex, FunctionBodyPtr> =
+            PrimaryMap::new();
+        for (sig_index, _) in function_call_trampolines.iter() {
+            let func_type = module.signatures.get(sig_index).unwrap();
+            let index = self.signatures.register(&func_type);
+            let ptr = allocated_functions
+                .drain(0..1)
+                .map(|slice| FunctionBodyPtr(slice as *mut [_]))
+                .collect::<Vec<_>>()[0];
+            allocated_function_call_trampolines.push(ptr);
+            let trampoline = unsafe {
+                std::mem::transmute::<*const VMFunctionBody, VMTrampoline>((**ptr).as_ptr())
+            };
             self.function_call_trampolines.insert(index, trampoline);
         }

-        let allocated_dynamic_function_trampolines = dynamic_function_trampolines
-            .values()
-            .map(|compiled_function| {
-                let ptr = self
-                    .code_memory
-                    .allocate_for_function(registry, &compiled_function)
-                    .map_err(|message| {
-                        CompileError::Resource(format!(
-                            "failed to allocate memory for dynamic function trampolines: {}",
-                            message
-                        ))
-                    })?;
-                Ok(FunctionBodyPtr(ptr as _))
-            })
-            .collect::<Result<PrimaryMap<FunctionIndex, _>, CompileError>>()?;
+        let allocated_dynamic_function_trampolines = allocated_functions
+            .drain(..)
+            .map(|slice| FunctionBodyPtr(slice as *mut [_]))
+            .collect::<PrimaryMap<FunctionIndex, _>>();
+
+        let mut exec_iter = allocated_executable_sections.iter();
+        let mut data_iter = allocated_data_sections.iter();
+        let allocated_custom_sections = custom_sections
+            .iter()
+            .map(|(_, section)| {
+                SectionBodyPtr(
+                    if section.protection == CustomSectionProtection::ReadExecute {
+                        exec_iter.next()
+                    } else {
+                        data_iter.next()
+                    }
+                    .unwrap()
+                    .as_ptr(),
+                )
+            })
+            .collect::<PrimaryMap<SectionIndex, _>>();

         Ok((
-            allocated_functions,
+            allocated_functions_result,
             allocated_function_call_trampolines,
             allocated_dynamic_function_trampolines,
+            allocated_custom_sections,
         ))
     }

     /// Make memory containing compiled code executable.
     pub(crate) fn publish_compiled_code(&mut self) {
-        self.code_memory.publish();
+        self.code_memory.last_mut().unwrap().publish();
     }

     /// Publish the unwind registry into code memory.
     pub(crate) fn publish_unwind_registry(&mut self, unwind_registry: Arc<UnwindRegistry>) {
-        self.code_memory.publish_unwind_registry(unwind_registry);
+        self.code_memory
+            .last_mut()
+            .unwrap()
+            .publish_unwind_registry(unwind_registry);
     }

     /// Shared signature registry.
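
The rewritten `allocate` above leans on an ordering invariant: the engine chains local function bodies, call trampolines, and dynamic trampolines into one list, and `CodeMemory::allocate` returns one allocated body per input, in that same order, so the `drain` calls can peel the groups back off by count (the trampoline loop's `drain(0..1)` is the one-at-a-time form of the same idea; `exec_iter`/`data_iter` similarly re-interleave the partitioned sections back into `SectionIndex` order). A self-contained sketch with integers standing in for the body slices:

    fn main() {
        let functions = vec![1, 2, 3]; // local function bodies
        let call_trampolines = vec![10, 20]; // one per signature
        let dynamic_trampolines = vec![100]; // one per function index

        // Chained in a fixed order, as in the engine...
        let mut allocated: Vec<i32> = functions
            .iter()
            .chain(call_trampolines.iter())
            .chain(dynamic_trampolines.iter())
            .copied()
            .collect();

        // ...so draining by the same counts recovers each group.
        let funcs: Vec<i32> = allocated.drain(0..functions.len()).collect();
        let calls: Vec<i32> = allocated.drain(0..call_trampolines.len()).collect();
        let dynamics: Vec<i32> = allocated.drain(..).collect();
        assert_eq!(funcs, [1, 2, 3]);
        assert_eq!(calls, [10, 20]);
        assert_eq!(dynamics, [100]);
    }
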
@@ -8,14 +8,14 @@ use wasmer_compiler::{
 use wasmer_types::entity::{EntityRef, PrimaryMap};
 use wasmer_types::LocalFunctionIndex;
 use wasmer_vm::ModuleInfo;
-use wasmer_vm::{FunctionBodyPtr, VMFunctionBody};
+use wasmer_vm::{FunctionBodyPtr, SectionBodyPtr, VMFunctionBody};

 fn apply_relocation(
     body: usize,
     r: &Relocation,
     allocated_functions: &PrimaryMap<LocalFunctionIndex, FunctionBodyPtr>,
     jt_offsets: &PrimaryMap<LocalFunctionIndex, JumpTableOffsets>,
-    allocated_sections: &PrimaryMap<SectionIndex, *const u8>,
+    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
 ) {
     let target_func_address: usize = match r.reloc_target {
         RelocationTarget::LocalFunc(index) => {
@@ -24,7 +24,7 @@ fn apply_relocation(
         }
         RelocationTarget::LibCall(libcall) => libcall.function_pointer(),
         RelocationTarget::CustomSection(custom_section) => {
-            allocated_sections[custom_section] as usize
+            *allocated_sections[custom_section] as usize
         }
         RelocationTarget::JumpTable(func_index, jt) => {
             let offset = *jt_offsets
@@ -72,11 +72,11 @@ pub fn link_module(
     allocated_functions: &PrimaryMap<LocalFunctionIndex, FunctionBodyPtr>,
     jt_offsets: &PrimaryMap<LocalFunctionIndex, JumpTableOffsets>,
     function_relocations: Relocations,
-    allocated_sections: &PrimaryMap<SectionIndex, *const u8>,
+    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
     section_relocations: &PrimaryMap<SectionIndex, Vec<Relocation>>,
 ) {
     for (i, section_relocs) in section_relocations.iter() {
-        let body = allocated_sections[i] as usize;
+        let body = *allocated_sections[i] as usize;
         for r in section_relocs {
             apply_relocation(body, r, allocated_functions, jt_offsets, allocated_sections);
         }
@@ -77,3 +77,16 @@ unsafe impl Send for FunctionBodyPtr {}
 /// # Safety
 /// TODO:
 unsafe impl Sync for FunctionBodyPtr {}
+
+/// Pointers to section data.
+#[derive(Clone, Copy, Debug)]
+#[repr(transparent)]
+pub struct SectionBodyPtr(pub *const u8);
+
+impl std::ops::Deref for SectionBodyPtr {
+    type Target = *const u8;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
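
A short usage sketch for the new wrapper, assuming the `SectionBodyPtr` defined in the hunk above is in scope; its `Deref` impl is why call sites earlier in this commit gained a leading `*`:

    fn main() {
        let bytes = [0xde_u8, 0xad, 0xbe, 0xef];
        let ptr = SectionBodyPtr(bytes.as_ptr());

        // `*ptr` goes through Deref to the inner *const u8 ...
        let raw: *const u8 = *ptr;

        // ... which is what `from_raw_parts(*eh_frame_section_pointer, ..)` and
        // `*allocated_sections[i] as usize` rely on.
        let slice = unsafe { std::slice::from_raw_parts(raw, bytes.len()) };
        assert_eq!(slice, &bytes[..]);
        assert_eq!(*ptr as usize, bytes.as_ptr() as usize);
    }
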