Merge remote-tracking branch 'origin/master' into middleware
@@ -69,26 +69,6 @@ impl LLVMConfig {
/// Creates a new configuration object with the default configuration
/// specified.
pub fn new(features: Features, target: Target) -> Self {
let operating_system =
if target.triple().operating_system == wasmer_compiler::OperatingSystem::Darwin {
// LLVM detects static relocation + darwin + 64-bit and
// force-enables PIC because MachO doesn't support that
// combination. They don't check whether they're targeting
// MachO, they check whether the OS is set to Darwin.
//
// Since both linux and darwin use SysV ABI, this should work.
wasmer_compiler::OperatingSystem::Linux
} else {
target.triple().operating_system
};
let triple = Triple {
architecture: target.triple().architecture,
vendor: target.triple().vendor.clone(),
operating_system,
environment: target.triple().environment,
binary_format: target_lexicon::BinaryFormat::Elf,
};
let target = Target::new(triple, *target.cpu_features());
Self {
enable_nan_canonicalization: true,
enable_verifier: false,

@@ -112,7 +92,27 @@ impl LLVMConfig {
}

pub fn target_triple(&self) -> TargetTriple {
TargetTriple::create(&self.target().triple().to_string())
let target = self.target();
let operating_system =
if target.triple().operating_system == wasmer_compiler::OperatingSystem::Darwin {
// LLVM detects static relocation + darwin + 64-bit and
// force-enables PIC because MachO doesn't support that
// combination. They don't check whether they're targeting
// MachO, they check whether the OS is set to Darwin.
//
// Since both linux and darwin use SysV ABI, this should work.
wasmer_compiler::OperatingSystem::Linux
} else {
target.triple().operating_system
};
let triple = Triple {
architecture: target.triple().architecture,
vendor: target.triple().vendor.clone(),
operating_system,
environment: target.triple().environment,
binary_format: target_lexicon::BinaryFormat::Elf,
};
TargetTriple::create(&triple.to_string())
}

/// Generates the target machine for the current target
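
A minimal standalone sketch (not the crate's code) of the Darwin-to-Linux remapping these hunks move from new() into target_triple(). It is written directly against target_lexicon, whereas the diff goes through wasmer_compiler's re-exported OperatingSystem; the function name here is illustrative only.

use target_lexicon::{BinaryFormat, OperatingSystem, Triple};

fn elf_triple_for_llvm(triple: &Triple) -> Triple {
    // Report Linux instead of Darwin so LLVM does not force PIC for 64-bit MachO targets;
    // since both Linux and Darwin use the SysV ABI, the generated code stays compatible.
    let operating_system = if triple.operating_system == OperatingSystem::Darwin {
        OperatingSystem::Linux
    } else {
        triple.operating_system
    };
    Triple {
        architecture: triple.architecture,
        vendor: triple.vendor.clone(),
        operating_system,
        environment: triple.environment,
        binary_format: BinaryFormat::Elf, // always emit ELF, matching the hunks above
    }
}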

@@ -149,10 +149,11 @@ impl LLVMConfig {
.map(|feature| format!("+{}", feature.to_string()))
.join(",");

let llvm_target = InkwellTarget::from_triple(&self.target_triple()).unwrap();
let target_triple = self.target_triple();
let llvm_target = InkwellTarget::from_triple(&target_triple).unwrap();
llvm_target
.create_target_machine(
&self.target_triple(),
&target_triple,
"generic",
&llvm_cpu_features,
self.opt_level,
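
Not the crate's code: a reduced inkwell sketch of the TargetMachine setup this hunk touches, which now computes self.target_triple() once and reuses the value for both the target lookup and the machine creation. The function name, the empty feature string, and the RelocMode/CodeModel/OptimizationLevel values below are placeholders; the real ones come from the config and are not shown in the hunk.

use inkwell::targets::{CodeModel, InitializationConfig, RelocMode, Target, TargetMachine, TargetTriple};
use inkwell::OptimizationLevel;

fn make_target_machine(triple_str: &str) -> Option<TargetMachine> {
    Target::initialize_all(&InitializationConfig::default());
    // Build the triple once and reuse it, as the change above does.
    let target_triple = TargetTriple::create(triple_str);
    let target = Target::from_triple(&target_triple).ok()?;
    target.create_target_machine(
        &target_triple,
        "generic",                     // CPU name, as in the hunk above
        "",                            // CPU feature string (the crate joins "+feature,..." here)
        OptimizationLevel::Default,    // placeholder for self.opt_level
        RelocMode::Static,             // placeholder
        CodeModel::Default,            // placeholder
    )
}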

@@ -1,6 +1,9 @@
use crate::config::{CompiledFunctionKind, LLVMConfig};
use crate::object_file::load_object_file;
use crate::translator::abi::{func_type_to_llvm, is_sret, rets_from_call};
use crate::translator::abi::{
func_type_to_llvm, get_vmctx_ptr_param, is_sret, pack_values_for_register_return,
rets_from_call,
};
use crate::translator::intrinsics::{type_to_llvm, type_to_llvm_ptr, Intrinsics};
use inkwell::{
attributes::{Attribute, AttributeLoc},

@@ -14,7 +17,6 @@ use inkwell::{
};
use std::cmp;
use std::convert::TryInto;
use std::iter;
use wasm_common::{FunctionType, Type};
use wasmer_compiler::{CompileError, FunctionBody};

@@ -48,7 +50,7 @@ impl FuncTrampoline {
let (callee_ty, callee_attrs) = func_type_to_llvm(&self.ctx, &intrinsics, ty)?;
let trampoline_ty = intrinsics.void_ty.fn_type(
&[
intrinsics.ctx_ptr_ty.as_basic_type_enum(), // callee_vmctx ptr
intrinsics.ctx_ptr_ty.as_basic_type_enum(), // vmctx ptr
callee_ty
.ptr_type(AddressSpace::Generic)
.as_basic_type_enum(), // callee function address

@@ -134,16 +136,11 @@ impl FuncTrampoline {
module.set_data_layout(&target_machine.get_target_data().get_data_layout());
let intrinsics = Intrinsics::declare(&module, &self.ctx);

let params = iter::once(Ok(intrinsics.ctx_ptr_ty.as_basic_type_enum()))
.chain(
ty.params()
.iter()
.map(|param_ty| type_to_llvm(&intrinsics, *param_ty)),
)
.collect::<Result<Vec<_>, _>>()?;
let trampoline_ty = intrinsics.void_ty.fn_type(params.as_slice(), false);

let (trampoline_ty, trampoline_attrs) = func_type_to_llvm(&self.ctx, &intrinsics, ty)?;
let trampoline_func = module.add_function("", trampoline_ty, Some(Linkage::External));
for (attr, attr_loc) in trampoline_attrs {
trampoline_func.add_attribute(attr_loc, attr);
}
trampoline_func
.as_global_value()
.set_section(FUNCTION_SECTION);

@@ -311,14 +308,6 @@ fn generate_dynamic_trampoline<'ctx>(
let builder = context.create_builder();
builder.position_at_end(entry_block);

/*
// TODO: remove debugging
builder.build_call(
intrinsics.debug_trap,
&[],
"");
*/

// Allocate stack space for the params and results.
let values = builder.build_alloca(
intrinsics.i128_ty.array_type(cmp::max(

@@ -329,6 +318,7 @@ fn generate_dynamic_trampoline<'ctx>(
);

// Copy params to 'values'.
let first_user_param = if is_sret(func_sig)? { 2 } else { 1 };
for i in 0..func_sig.params().len() {
let ptr = unsafe {
builder.build_in_bounds_gep(
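
The hunk above introduces first_user_param, and the following hunk indexes the trampoline's parameters with it. An illustrative restatement of that mapping (not crate code): the vmctx pointer takes one leading slot, and an sret signature adds a hidden results pointer in front of it (read back later via get_first_param), so user parameters start at index 2 with sret and at index 1 without.

fn llvm_param_index(wasm_index: u32, uses_sret: bool) -> u32 {
    // Mirrors the diff: `if is_sret(func_sig)? { 2 } else { 1 }`.
    let first_user_param = if uses_sret { 2 } else { 1 };
    wasm_index + first_user_param
}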

@@ -343,57 +333,99 @@ fn generate_dynamic_trampoline<'ctx>(
let ptr = builder
.build_bitcast(ptr, type_to_llvm_ptr(intrinsics, func_sig.params()[i])?, "")
.into_pointer_value();
builder.build_store(ptr, trampoline_func.get_nth_param(i as u32 + 1).unwrap());
builder.build_store(
ptr,
trampoline_func
.get_nth_param(i as u32 + first_user_param)
.unwrap(),
);
}

let callee_ty = intrinsics
.void_ty
.fn_type(
&[
intrinsics.ctx_ptr_ty.as_basic_type_enum(),
intrinsics.i128_ptr_ty.as_basic_type_enum(),
intrinsics.ctx_ptr_ty.as_basic_type_enum(), // vmctx ptr
intrinsics.i128_ptr_ty.as_basic_type_enum(), // in/out values ptr
],
false,
)
.ptr_type(AddressSpace::Generic)
.ptr_type(AddressSpace::Generic);

let vmctx = trampoline_func.get_nth_param(0).unwrap();
let vmctx = get_vmctx_ptr_param(&trampoline_func);
let callee = builder
.build_load(
builder
.build_bitcast(vmctx, callee_ty, "")
.build_bitcast(vmctx, callee_ty.ptr_type(AddressSpace::Generic), "")
.into_pointer_value(),
"",
)
.into_pointer_value();

let values_ptr = builder.build_pointer_cast(values, intrinsics.i128_ptr_ty, "");
builder.build_call(
callee,
&[vmctx.as_basic_value_enum(), values.as_basic_value_enum()],
&[
vmctx.as_basic_value_enum(),
values_ptr.as_basic_value_enum(),
],
"",
);

match func_sig.results() {
[] => {
builder.build_return(None);
}
[ty] => {
let ptr = unsafe {
builder.build_in_bounds_gep(
values,
&[intrinsics.i32_zero, intrinsics.i32_ty.const_int(0, false)],
"",
)
};
let ptr = builder
.build_bitcast(ptr, type_to_llvm_ptr(intrinsics, *ty)?, "")
if func_sig.results().is_empty() {
builder.build_return(None);
} else {
let results = func_sig
.results()
.iter()
.enumerate()
.map(|(idx, ty)| {
let ptr = unsafe {
builder.build_gep(
values,
&[
intrinsics.i32_ty.const_zero(),
intrinsics.i32_ty.const_int(idx.try_into().unwrap(), false),
],
"",
)
};
let ptr = builder.build_pointer_cast(ptr, type_to_llvm_ptr(intrinsics, *ty)?, "");
Ok(builder.build_load(ptr, ""))
})
.collect::<Result<Vec<_>, CompileError>>()?;

if is_sret(func_sig)? {
let sret = trampoline_func
.get_first_param()
.unwrap()
.into_pointer_value();
let ret = builder.build_load(ptr, "");
builder.build_return(Some(&ret));
let mut struct_value = sret
.get_type()
.get_element_type()
.into_struct_type()
.get_undef();
for (idx, value) in results.iter().enumerate() {
let value = builder.build_bitcast(
*value,
type_to_llvm(&intrinsics, func_sig.results()[idx])?,
"",
);
struct_value = builder
.build_insert_value(struct_value, value, idx as u32, "")
.unwrap()
.into_struct_value();
}
builder.build_store(sret, struct_value);
builder.build_return(None);
} else {
builder.build_return(Some(&pack_values_for_register_return(
&intrinsics,
&builder,
&results.as_slice(),
&trampoline_func.get_type(),
)?));
}
_ => unimplemented!("multi-value return is not yet implemented"),
};
}

Ok(())
}
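
For orientation, a hedged Rust-level restatement of the callee this dynamic trampoline calls: the IR above bitcasts the vmctx pointer to a pointer-to-pointer to a void function taking (ctx ptr, i128 ptr), loads the function pointer out of it, and calls it with the vmctx and the in/out values pointer. The type name below is an assumption for illustration, not a Wasmer API.

use std::ffi::c_void;

// The values pointer carries the parameters on entry and the results on return,
// one 128-bit slot per value, matching the i128 array allocated above.
type DynamicCallee = unsafe extern "C" fn(vmctx: *mut c_void, values: *mut u128);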

@@ -110,10 +110,12 @@ impl FuncTranslator {
let entry = self.ctx.append_basic_block(func, "entry");
let start_of_code = self.ctx.append_basic_block(func, "start_of_code");
let return_ = self.ctx.append_basic_block(func, "return");
let alloca_builder = self.ctx.create_builder();
let cache_builder = self.ctx.create_builder();
let builder = self.ctx.create_builder();
cache_builder.position_at_end(entry);
let br = cache_builder.build_unconditional_branch(start_of_code);
alloca_builder.position_before(&br);
cache_builder.position_before(&br);
builder.position_at_end(start_of_code);
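
A self-contained inkwell sketch (not the crate's code, using the same builder calls that appear in this diff) of the block layout the hunk above sets up: allocas are emitted through alloca_builder at the top of the entry block, ahead of the unconditional branch into start_of_code where ordinary code is built. The usual reason for this pattern is to keep all allocas grouped in the entry block, where LLVM's mem2reg/SROA promotion expects them.

use inkwell::context::Context;

fn entry_block_layout_demo() {
    let ctx = Context::create();
    let module = ctx.create_module("demo");
    let func = module.add_function("f", ctx.void_type().fn_type(&[], false), None);

    let entry = ctx.append_basic_block(func, "entry");
    let start_of_code = ctx.append_basic_block(func, "start_of_code");

    let alloca_builder = ctx.create_builder();
    let cache_builder = ctx.create_builder();
    let builder = ctx.create_builder();

    // The entry block ends in a single unconditional branch; the alloca and cache
    // builders are parked just before that branch.
    cache_builder.position_at_end(entry);
    let br = cache_builder.build_unconditional_branch(start_of_code);
    alloca_builder.position_before(&br);
    cache_builder.position_before(&br);
    builder.position_at_end(start_of_code);

    // Allocas emitted through `alloca_builder` land in `entry`, ahead of the branch,
    // while ordinary instructions built with `builder` land in `start_of_code`.
    let _slot = alloca_builder.build_alloca(ctx.i32_type(), "local0");
    builder.build_return(None);
}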

@@ -139,14 +141,23 @@ impl FuncTranslator {
} else {
1
};
let mut is_first_alloca = true;
let mut insert_alloca = |ty, name| {
let alloca = alloca_builder.build_alloca(ty, name);
if is_first_alloca {
alloca_builder.position_at(entry, &alloca.as_instruction_value().unwrap());
is_first_alloca = false;
}
alloca
};

for idx in 0..wasm_fn_type.params().len() {
let ty = wasm_fn_type.params()[idx];
let ty = type_to_llvm(&intrinsics, ty)?;
let value = func
.get_nth_param((idx as u32).checked_add(first_param).unwrap())
.unwrap();
// TODO: don't interleave allocas and stores.
let alloca = cache_builder.build_alloca(ty, "param");
let alloca = insert_alloca(ty, "param");
cache_builder.build_store(alloca, value);
params.push(alloca);
}

@@ -160,9 +171,8 @@ impl FuncTranslator {
.map_err(to_wasm_error)?;
let ty = wptype_to_type(ty).map_err(to_compile_error)?;
let ty = type_to_llvm(&intrinsics, ty)?;
// TODO: don't interleave allocas and stores.
for _ in 0..count {
let alloca = cache_builder.build_alloca(ty, "local");
let alloca = insert_alloca(ty, "local");
cache_builder.build_store(alloca, const_zero(ty));
locals.push(alloca);
}

@@ -174,6 +184,7 @@ impl FuncTranslator {
let mut fcg = LLVMFunctionCodeGenerator {
context: &self.ctx,
builder,
alloca_builder,
intrinsics: &intrinsics,
state,
function: func,

@@ -608,7 +619,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let divisor_is_zero = self.builder.build_int_compare(
IntPredicate::EQ,
right,
int_type.const_int(0, false),
int_type.const_zero(),
"divisor_is_zero",
);
let should_trap = self.builder.build_or(

@@ -633,10 +644,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
self.intrinsics.expect_i1,
&[
should_trap.as_basic_value_enum(),
self.intrinsics
.i1_ty
.const_int(0, false)
.as_basic_value_enum(),
self.intrinsics.i1_ty.const_zero().as_basic_value_enum(),
],
"should_trap_expect",
)

@@ -671,7 +679,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let should_trap = self.builder.build_int_compare(
IntPredicate::EQ,
value,
int_type.const_int(0, false),
int_type.const_zero(),
"divisor_is_zero",
);

@@ -681,10 +689,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
self.intrinsics.expect_i1,
&[
should_trap.as_basic_value_enum(),
self.intrinsics
.i1_ty
.const_int(0, false)
.as_basic_value_enum(),
self.intrinsics.i1_ty.const_zero().as_basic_value_enum(),
],
"should_trap_expect",
)

@@ -1179,7 +1184,7 @@ fn emit_stack_map<'ctx>(
.const_int(stackmap_id as u64, false)
.as_basic_value_enum(),
);
params.push(intrinsics.i32_ty.const_int(0, false).as_basic_value_enum());
params.push(intrinsics.i32_ty.const_zero().as_basic_value_enum());

let locals: Vec<_> = locals.iter().map(|x| x.as_basic_value_enum()).collect();
let mut value_semantics: Vec<ValueSemantic> =

@@ -1226,7 +1231,7 @@ fn finalize_opcode_stack_map<'ctx>(
.i64_ty
.const_int(stackmap_id as u64, false)
.as_basic_value_enum(),
intrinsics.i32_ty.const_int(0, false).as_basic_value_enum(),
intrinsics.i32_ty.const_zero().as_basic_value_enum(),
],
"opcode_stack_map_end",
);

@@ -1245,6 +1250,7 @@ fn finalize_opcode_stack_map<'ctx>(
pub struct LLVMFunctionCodeGenerator<'ctx, 'a> {
context: &'ctx Context,
builder: Builder<'ctx>,
alloca_builder: Builder<'ctx>,
intrinsics: &'a Intrinsics<'ctx>,
state: State<'ctx>,
function: FunctionValue<'ctx>,

@@ -1390,7 +1396,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
);
let signal_mem = ctx.signal_mem();
let iv = self.builder
.build_store(signal_mem, self.context.i8_type().const_int(0 as u64, false));
.build_store(signal_mem, self.context.i8_type().const_zero());
// Any 'store' can be made volatile.
iv.set_volatile(true).unwrap();
finalize_opcode_stack_map(

@@ -1617,14 +1623,13 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
let current_block = self.builder.get_insert_block().ok_or_else(|| {
CompileError::Codegen("not currently in a block".to_string())
})?;
self.builder.build_unconditional_branch(*frame.code_after());

for phi in frame.phis().to_vec().iter().rev() {
let (value, info) = self.state.pop1_extra()?;
let value = self.apply_pending_canonicalization(value, info);
phi.add_incoming(&[(&value, current_block)])
}
let frame = self.state.frame_at_depth(0)?;
self.builder.build_unconditional_branch(*frame.code_after());
}

let (if_else_block, if_else_state) = if let ControlFrame::IfElse {

@@ -1696,19 +1701,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
self.state.push1(phi.as_basic_value());
} else {
let basic_ty = phi.as_basic_value().get_type();
let placeholder_value = match basic_ty {
BasicTypeEnum::IntType(int_ty) => {
int_ty.const_int(0, false).as_basic_value_enum()
}
BasicTypeEnum::FloatType(float_ty) => {
float_ty.const_float(0.0).as_basic_value_enum()
}
_ => {
return Err(CompileError::Codegen(
"Operator::End phi type unimplemented".to_string(),
));
}
};
let placeholder_value = const_zero(basic_ty);
self.state.push1(placeholder_value);
phi.as_instruction().erase_from_basic_block();
}
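
The const_zero(basic_ty) call that replaces the Int/Float match above refers to a helper defined elsewhere in the crate; its definition is not part of this diff. A hedged sketch of what such a helper over inkwell's BasicTypeEnum can look like, covering only the types wasm values map to:

use inkwell::types::BasicTypeEnum;
use inkwell::values::BasicValueEnum;

fn const_zero<'ctx>(ty: BasicTypeEnum<'ctx>) -> BasicValueEnum<'ctx> {
    match ty {
        BasicTypeEnum::IntType(t) => t.const_zero().into(),
        BasicTypeEnum::FloatType(t) => t.const_zero().into(),
        BasicTypeEnum::VectorType(t) => t.const_zero().into(),
        // Pointers, structs and arrays are not needed for wasm value types in this sketch.
        _ => unimplemented!("const_zero: unhandled type"),
    }
}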

@@ -1721,15 +1714,13 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
.ok_or_else(|| CompileError::Codegen("not currently in a block".to_string()))?;

let frame = self.state.outermost_frame()?;
self.builder.build_unconditional_branch(*frame.br_dest());
for phi in frame.phis().to_vec().iter().rev() {
let (arg, info) = self.state.pop1_extra()?;
let arg = self.apply_pending_canonicalization(arg, info);
phi.add_incoming(&[(&arg, current_block)]);
}

let frame = self.state.outermost_frame()?;
self.builder.build_unconditional_branch(*frame.br_dest());

self.state.reachable = false;
}

@@ -2090,8 +2081,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
});

let params = abi::args_to_call(
// TODO: should be an alloca_builder.
&self.builder,
&self.alloca_builder,
func_type,
callee_vmctx.into_pointer_value(),
&func.get_type().get_element_type().into_function_type(),

@@ -2334,8 +2324,7 @@ impl<'ctx, 'a> LLVMFunctionCodeGenerator<'ctx, 'a> {
});

let params = abi::args_to_call(
// TODO: should be an alloca_builder.
&self.builder,
&self.alloca_builder,
func_type,
ctx_ptr.into_pointer_value(),
&llvm_func_type,