Merge pull request #31 from wasmerio/llvm-memorydefinition-cache

Cache lookup of VMMemoryDefinition.
Authored by nlewycky, 2020-05-18 20:03:48 -07:00; committed by GitHub.
3 changed files with 3420 additions and 2772 deletions

File diff suppressed because it is too large.

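The visible hunks keep the existing `*cached_memories.entry(index).or_insert_with(..)` memoization: the walk through the vmctx to find the `VMMemoryDefinition` now happens once per memory index, and later accesses reuse the cached pointers. As a rough illustration of that pattern only (the `MemoryIndex`, `CachedMemory`, and `CtxCache` names below are simplified placeholders, not the compiler's own types):

    use std::collections::HashMap;

    // Simplified stand-ins for the compiler's index and cache-entry types.
    type MemoryIndex = u32;

    #[derive(Clone, Copy, Debug)]
    struct CachedMemory {
        definition_ptr: usize, // placeholder for the cached VMMemoryDefinition pointer
    }

    struct CtxCache {
        cached_memories: HashMap<MemoryIndex, CachedMemory>,
    }

    impl CtxCache {
        /// First call for `index` computes the entry; later calls reuse it.
        fn memory(&mut self, index: MemoryIndex) -> CachedMemory {
            *self.cached_memories.entry(index).or_insert_with(|| {
                // The real code walks the vmctx here to find the VMMemoryDefinition.
                CachedMemory { definition_ptr: 0 }
            })
        }
    }

    fn main() {
        let mut cache = CtxCache { cached_memories: HashMap::new() };
        let first = cache.memory(0); // computed once
        let second = cache.memory(0); // served from the cache
        assert_eq!(first.definition_ptr, second.definition_ptr);
    }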

@@ -669,17 +669,10 @@ pub enum MemoryCache<'ctx> {
     /// The memory moves around.
     Dynamic {
         ptr_to_base_ptr: PointerValue<'ctx>,
-        ptr_to_bounds: PointerValue<'ctx>,
-        minimum: Pages,
-        maximum: Option<Pages>,
+        current_length_ptr: PointerValue<'ctx>,
     },
     /// The memory is always in the same place.
-    Static {
-        base_ptr: PointerValue<'ctx>,
-        bounds: IntValue<'ctx>,
-        minimum: Pages,
-        maximum: Option<Pages>,
-    },
+    Static { base_ptr: PointerValue<'ctx> },
 }
 
 struct TableCache<'ctx> {
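The slimmed-down variants rely on everything needed at access time living in a single `VMMemoryDefinition` record: `Dynamic` now caches pointers into that record, while `Static` caches the base pointer value itself, and the old `bounds`/`minimum`/`maximum` fields disappear. A sketch of the layout being assumed here; the field names and the width of `current_length` are illustrative, and the authoritative definition lives in the wasmer runtime crate:

    // Illustrative only; see wasmer's runtime for the real definition.
    #[repr(C)]
    pub struct VMMemoryDefinition {
        /// Pointer to the start of linear memory; may change when a dynamic
        /// memory grows and is reallocated.
        pub base: *mut u8,
        /// Current size of linear memory in bytes.
        pub current_length: usize,
    }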
@@ -783,129 +776,59 @@ impl<'ctx, 'a> CtxType<'ctx, 'a> {
             &self.cache_builder,
             &self.offsets,
         );
+        let memory_plan = &memory_plans[index];
         *cached_memories.entry(index).or_insert_with(|| {
-            let (memory_array_ptr_ptr, index, memory_type, minimum, maximum, field_name) = {
-                let desc = memory_plans.get(index).unwrap();
-                if let Some(local_mem_index) = wasm_module.local_memory_index(index) {
-                    let byte_offset = intrinsics.i64_ty.const_int(
-                        offsets
-                            .vmctx_vmmemory_definition_base(local_mem_index)
-                            .into(),
-                        false,
-                    );
-                    (
-                        unsafe {
-                            cache_builder.build_gep(
-                                ctx_ptr_value,
-                                &[byte_offset],
-                                "memory_base_ptr_ptr",
-                            )
-                        },
-                        local_mem_index.index() as u64,
-                        desc.style.clone(),
-                        desc.memory.minimum,
-                        desc.memory.maximum,
-                        "context_field_ptr_to_local_memory",
-                    )
-                } else {
-                    let byte_offset = intrinsics.i64_ty.const_int(
-                        offsets.vmctx_vmmemory_import_definition(index).into(),
-                        false,
-                    );
-                    (
-                        unsafe {
-                            cache_builder
-                                .build_struct_gep(
-                                    ctx_ptr_value,
-                                    offset_to_index(offsets.vmctx_imported_memories_begin()),
-                                    "memory_array_ptr_ptr",
-                                )
-                                .unwrap()
-                        },
-                        index.index() as u64,
-                        desc.style.clone(),
-                        desc.memory.minimum,
-                        desc.memory.maximum,
-                        "context_field_ptr_to_imported_memory",
-                    )
-                }
-            };
-            let memory_array_ptr = cache_builder
-                .build_load(memory_array_ptr_ptr, "memory_array_ptr")
-                .into_pointer_value();
-            tbaa_label(
-                module,
-                intrinsics,
-                field_name,
-                memory_array_ptr.as_instruction_value().unwrap(),
-                None,
-            );
-            let const_index = intrinsics.i32_ty.const_int(index, false);
-            let memory_ptr_ptr = unsafe {
-                cache_builder.build_in_bounds_gep(
-                    memory_array_ptr,
-                    &[const_index],
-                    "memory_ptr_ptr",
-                )
-            };
-            let memory_ptr = cache_builder
-                .build_load(memory_ptr_ptr, "memory_ptr")
-                .into_pointer_value();
-            tbaa_label(
-                module,
-                intrinsics,
-                "memory_ptr",
-                memory_ptr.as_instruction_value().unwrap(),
-                Some(index as u32),
-            );
-            let (ptr_to_base_ptr, ptr_to_bounds) = unsafe {
-                (
-                    cache_builder
-                        .build_struct_gep(memory_ptr, 0, "base_ptr")
-                        .unwrap(),
-                    cache_builder
-                        .build_struct_gep(memory_ptr, 1, "bounds_ptr")
-                        .unwrap(),
-                )
-            };
-            match memory_type {
-                MemoryStyle::Dynamic => MemoryCache::Dynamic {
-                    ptr_to_base_ptr,
-                    ptr_to_bounds,
-                    minimum,
-                    maximum,
-                },
-                MemoryStyle::Static { bound: _ } => {
-                    let base_ptr = cache_builder
-                        .build_load(ptr_to_base_ptr, "base")
-                        .into_pointer_value();
-                    let bounds = cache_builder
-                        .build_load(ptr_to_bounds, "bounds")
-                        .into_int_value();
-                    tbaa_label(
-                        module,
-                        intrinsics,
-                        "static_memory_base",
-                        base_ptr.as_instruction_value().unwrap(),
-                        Some(index as u32),
-                    );
-                    tbaa_label(
-                        module,
-                        intrinsics,
-                        "static_memory_bounds",
-                        bounds.as_instruction_value().unwrap(),
-                        Some(index as u32),
-                    );
-                    MemoryCache::Static {
-                        base_ptr,
-                        bounds,
-                        minimum,
-                        maximum,
-                    }
-                }
-            }
+            let memory_definition_ptr =
+                if let Some(local_memory_index) = wasm_module.local_memory_index(index) {
+                    let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
+                    let offset = intrinsics.i32_ty.const_int(offset.into(), false);
+                    unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") }
+                } else {
+                    let offset = offsets.vmctx_vmmemory_import(index);
+                    let offset = intrinsics.i32_ty.const_int(offset.into(), false);
+                    let memory_definition_ptr_ptr =
+                        unsafe { cache_builder.build_gep(ctx_ptr_value, &[offset], "") };
+                    let memory_definition_ptr_ptr = cache_builder
+                        .build_bitcast(
+                            memory_definition_ptr_ptr,
+                            intrinsics.i8_ptr_ty.ptr_type(AddressSpace::Generic),
+                            "",
+                        )
+                        .into_pointer_value();
+                    cache_builder
+                        .build_load(memory_definition_ptr_ptr, "")
+                        .into_pointer_value()
+                };
+            let memory_definition_ptr = cache_builder
+                .build_bitcast(
+                    memory_definition_ptr,
+                    intrinsics.vmmemory_definition_ptr_ty,
+                    "",
+                )
+                .into_pointer_value();
+            let base_ptr = cache_builder
+                .build_struct_gep(
+                    memory_definition_ptr,
+                    intrinsics.vmmemory_definition_base_element,
+                    "",
+                )
+                .unwrap();
+            if memory_plan.style == MemoryStyle::Dynamic {
+                let current_length_ptr = cache_builder
+                    .build_struct_gep(
+                        memory_definition_ptr,
+                        intrinsics.vmmemory_definition_current_length_element,
+                        "",
+                    )
+                    .unwrap();
+                MemoryCache::Dynamic {
+                    ptr_to_base_ptr: base_ptr,
+                    current_length_ptr,
+                }
+            } else {
+                let base_ptr = cache_builder.build_load(base_ptr, "").into_pointer_value();
+                // TODO: tbaa
+                MemoryCache::Static { base_ptr }
+            }
         })
     }
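For orientation, this is roughly how a code generator would consume the cached values when lowering a memory access: a dynamic memory re-reads the base and current length through the cached pointers on every access, while a static memory uses the cached base directly. The sketch below uses plain pointers in place of inkwell's `PointerValue`/`IntValue`, so it mirrors only the shape of the logic, not the real IR builder calls:

    // Plain-Rust stand-ins for the cached LLVM values.
    enum MemoryCache {
        Dynamic {
            ptr_to_base_ptr: *const *mut u8,  // points at VMMemoryDefinition.base
            current_length_ptr: *const usize, // points at VMMemoryDefinition.current_length
        },
        Static {
            base_ptr: *mut u8, // the base never moves, so its value is cached directly
        },
    }

    /// Resolve an effective address for `offset`, re-reading the definition
    /// when the memory is dynamic (it may have grown or moved since caching).
    unsafe fn resolve(cache: &MemoryCache, offset: usize) -> Option<*mut u8> {
        match cache {
            MemoryCache::Dynamic {
                ptr_to_base_ptr,
                current_length_ptr,
            } => {
                let base = **ptr_to_base_ptr;
                let len = **current_length_ptr;
                if offset < len {
                    Some(base.add(offset))
                } else {
                    None
                }
            }
            // Static memories rely on a fixed base; bounds handling is elided here.
            MemoryCache::Static { base_ptr } => Some(base_ptr.add(offset)),
        }
    }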