updated to latest openvr sdk, updated to use bindgen, ongoing refactoring

This commit is contained in:
Rene Eichhorn
2016-05-01 18:35:07 +02:00
parent 9b080084c2
commit ff9743f9cf
12 changed files with 3106 additions and 1669 deletions

View File

@ -6,6 +6,7 @@ authors = [
"Erick Tryzelaar", "Erick Tryzelaar",
"Rene Eichhorn" "Rene Eichhorn"
] ]
build = "src/sys/build.rs"
[lib] [lib]
name = "openvr" name = "openvr"

View File

@ -7,7 +7,7 @@ rust-openvr is a binding for openvr. It's still in progress. Tests are automatic
Also my private jenkins is running these test on Ubuntu 14.04 as well, every successful build will be pushed to its branch (stable, nightly, beta). For good practice always use either stable, beta or nightly instead of master! Also my private jenkins is running these test on Ubuntu 14.04 as well, every successful build will be pushed to its branch (stable, nightly, beta). For good practice always use either stable, beta or nightly instead of master!
## [Link to the documentation](http://auruss.github.io/rust-openvr/openvr/index.html) ## [Link to the documentation](http://auruss.github.io/rust-openvr/openvr/index.html)
## Current version: OpenVR SDK 0.9.11 (will be updated soon to newest!) ## Current sdk version: OpenVR SDK 0.9.19
Building Building
-------- --------
@ -32,69 +32,23 @@ Using rust-openvr
extern crate openvr; extern crate openvr;
use openvr::{SensorCapabilities, Ovr};
fn main() { fn main() {
// Initalize the Oculus VR library // Initialize system subsystem
let ovr = match Ovr::init() { let system = match openvr::init() {
Some(ovr) => ovr, Ok(sys) => sys,
None => { Err(err) => {
println!("Could not initialize OpenVR SDK"); println!("Could not initialize OpenVR SDK: \n\t{:?}", err);
return; return;
} }
}; };
// get the first available HMD device, returns None // accessing other sub systems
// if no HMD device is currently plugged in let ext = openvr::extended_display();
let hmd = match ovr.first_hmd() {
Some(hmd) => hmd,
None => {
println!("Could not get hmd");
return;
}
};
// start the sensor recording, Require orientation tracking // ..
let started = hmd.start_sensor(SensorCapabilities::new().set_orientation(true),
SensorCapabilities::new().set_orientation(true));
if !started {
println!("Could not start sensor");
return;
}
} }
``` ```
# Render loop # Examples
For data collection examples/test.rs can be used.
The OpenVR SDK will handle most of the heavy lifting of the barrel distortion. For an actual opengl implementation see examples/opengl.rs (WIP)
```rust
fn render(frame_index: uint, hmd: &ovr::Hmd, base_view: &Matrix4<f32>) {
// start a new frame, the frame_index should increment each frame
let frame_timing = hmd.begin_frame(frame_index);
let desc = hmd.get_description();
for &eye in [ovr::EyeLeft, ovr::EyeRight].iter() {
// start rendering a new eye, this will give the most current
// copy of the pose from the HMD tracking sensor
let pose = self.window.get_hmd().begin_eye_render(eye);
// base_view * pose * eye_view_adjustment
let view = base_view.mul_m(&pose.orientation.to_matrix4())
.mul_m(&Matrix4::translate(&eye.view_adjust));
let projection = desc.eye_fovs.eye(eye).default_eye_fov;
// render to texture
render();
let texture = ovr::Texture(width, height,
viewport_offset_x, viewport_offset_y,
viewport_width, viewport_height,
opengl_texture_id);
hmd.end_eye_render(eye, pose, &texture);
}
// this will swap the buffers and frame sync
hmd.end_frame();
}
```

View File

@ -16,7 +16,7 @@ fn print_matrix_4x3(offset: u32, mat: [[f32; 4]; 3]) {
} }
fn main() { fn main() {
let ivr = match openvr::IVRSystem::init() { let system = match openvr::init() {
Ok(ivr) => ivr, Ok(ivr) => ivr,
Err(err) => { Err(err) => {
println!("Failed to create IVR subsystem {:?}", err); println!("Failed to create IVR subsystem {:?}", err);
@ -24,27 +24,26 @@ fn main() {
} }
}; };
println!("IVR was created"); println!("IVRSystem was created");
println!("\tbounds: {:?}", ivr.bounds());
println!("\trecommended size: {:?}", ivr.recommended_render_target_size()); println!("\trecommended size: {:?}", system.recommended_render_target_size());
println!("\teye output: {:?} {:?}", ivr.eye_viewport(openvr::Eye::Left), ivr.eye_viewport(openvr::Eye::Right)); println!("\tvsync: {:?}", system.time_since_last_vsync());
println!("\tvsync: {:?}", ivr.time_since_last_vsync());
print!("\tprojection matrix left "); print!("\tprojection matrix left ");
print_matrix_4x4(31, ivr.projection_matrix(openvr::Eye::Left, 0.1, 100.)); print_matrix_4x4(31, system.projection_matrix(openvr::Eye::Left, 0.1, 100.));
print!("\tprojection matrix right "); print!("\tprojection matrix right ");
print_matrix_4x4(31, ivr.projection_matrix(openvr::Eye::Right, 0.1, 100.)); print_matrix_4x4(31, system.projection_matrix(openvr::Eye::Right, 0.1, 100.));
print!("\teye_to_head "); print!("\teye_to_head ");
print_matrix_4x3(8+12, ivr.eye_to_head_transform(openvr::Eye::Left)); print_matrix_4x3(8+12, system.eye_to_head_transform(openvr::Eye::Left));
print!("\tposes "); print!("\tposes ");
print_matrix_4x3(8+6, ivr.tracked_devices(0.).as_slice()[0].to_device); print_matrix_4x3(8+6, system.tracked_devices(0.).as_slice()[0].to_device);
println!("Distortion example"); println!("Distortion example");
for u in 0..2 { for u in 0..2 {
for v in 0..2 { for v in 0..2 {
let pos = ivr.compute_distortion( let pos = system.compute_distortion(
openvr::Eye::Left, openvr::Eye::Left,
u as f32 / 4., u as f32 / 4.,
v as f32 / 4., v as f32 / 4.,
@ -54,6 +53,17 @@ fn main() {
println!(""); println!("");
} }
let ext = match openvr::extended_display() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRExtendedDisplay subsystem {:?}", err);
return;
}
};
println!("\nIVRExtendedDisplay was created");
println!("\tbounds: {:?}", ext.window_bounds());
println!("\teye output: {:?} {:?}", ext.eye_viewport(openvr::Eye::Left), ext.eye_viewport(openvr::Eye::Right));
/*
println!("Trying to create a compositor"); println!("Trying to create a compositor");
match ivr.compositor() { match ivr.compositor() {
Err(err) => println!("Could not create compositor {:?}", err), Err(err) => println!("Could not create compositor {:?}", err),
@ -65,7 +75,9 @@ fn main() {
println!("\tgamma value = {}", comp.get_gamma()); println!("\tgamma value = {}", comp.get_gamma());
} }
} }
*/
openvr::shutdown();
println!("Done! \\o/"); println!("Done! \\o/");

2
openvr

Submodule openvr updated: 061cf411ee...a6c91ef973

5
scripts/binding.h Normal file
View File

@ -0,0 +1,5 @@
// This header is used for bindgen to automatically generate the openvr c binding
// bindgen -match openvr_capi.h scripts/binding.h -o binding.rs
#include <stdbool.h>
#include "../openvr/headers/openvr_capi.h"

View File

@ -1,144 +0,0 @@
# NOTE(review): leading indentation appears stripped in this rendering of the
# file; the body of the `with` block below must be indented in the real source.
import json
import re
# Matches fixed-size C array declarations such as "float [4]" -> (type, len).
array = re.compile(r"(.+)\[([0-9]+)\]")
# Machine-readable API description shipped with the OpenVR SDK headers.
data = {}
with open("openvr/headers/openvr_api.json") as f:
data = json.loads(f.read())
# Direct C-type -> Rust-type spellings used by parse_type. NOTE(review):
# entries containing qualifiers (e.g. 'const char', 'const uint16_t *') may
# never be hit, because parse_type strips 'const ' before consulting this
# table — verify which spellings the generated output actually uses.
type_mapping = {
'int': 'i32',
'uint64_t': 'u64',
'uint32_t': 'u32',
'uint16_t': 'u16',
'uint8_t': 'u8',
'int64_t': 'i64',
'int32_t': 'i32',
'int16_t': 'i16',
'int8_t': 'i8',
'double': 'f64',
'float': 'f32',
'_Bool': 'bool',
'unsigned short': 'u16',
'const char': 'u8',
'void': '()',
# I'm lazy
'unsigned char *': '*const u8',
'char *': '*const u8',
'char **': '*const *const u8',
'const uint16_t *': '*const u16',
'const uint8_t *': '*const u8',
'const struct vr::HmdVector2_t *': '*const HmdVector2_t',
'const struct vr::RenderModel_Vertex_t *': '*const RenderModel_Vertex_t',
'float [3][4]': '[[f32; 4]; 3]',
'float [16]': '[f32; 16]',
'float [4]': '[f32; 4]',
'float [3]': '[f32; 3]',
'float [2]': '[f32; 2]',
'double [3]': '[f64; 3]',
'union VREvent_Data_t': '[u8; 16]'
}
def parse_type(s):
    """Map a C/C++ type string from openvr_api.json to its Rust spelling.

    Qualifiers and namespaces are peeled off recursively, then the literal
    lookup table `type_mapping` is consulted, then pointer/reference and
    fixed-size-array forms are handled. Unknown types pass through unchanged.
    """
    if s.startswith("struct"):
        return parse_type(s[7:])
    if s.startswith("vr::"):
        return parse_type(s[4:])
    if s.startswith('enum'):
        return parse_type(s.split()[1])
    if s.startswith("const "):
        # note: stripped BEFORE the table lookup, so const-qualified table
        # entries are shadowed by this branch
        return parse_type(s[6:])
    if s in type_mapping:
        return type_mapping[s]
    if s[-2:] == ' *':
        return "*mut " + parse_type(s[:-2])
    if s[-2:] == ' &':
        # C++ references become const raw pointers on the Rust side
        return "*const " + parse_type(s[:-2])
    m = array.match(s)
    if m:
        return "[%s; %s]" % (parse_type(m.group(1)), m.group(2))
    return s
def parse_class(s):
    """Rewrite a namespaced interface name (vr::Foo) as the flat VR_Foo form
    used for the generated extern function prefixes; other names pass through."""
    return 'VR_' + s[4:] if s.startswith("vr::") else s
def shorten_enum(parent, name):
    """Drop the redundant leading component from a C enum value name
    (e.g. "EVREye_Eye_Left" -> "Eye_Left").

    `parent` is accepted for signature compatibility but is not used.
    Names without an underscore-separated prefix are returned unchanged.
    """
    parts = name.split('_')
    if len(parts) < 2:
        return name
    return '_'.join(parts[1:])
print """
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(improper_ctypes)]
#[link(name = "openvr_api")]
extern {}
extern "C" {
pub fn VR_Init(err: *mut HmdError) -> *const ();
pub fn VR_Shutdown();
pub fn VR_IsHmdPresent() -> bool;
pub fn VR_GetStringForHmdError(err: HmdError) -> *const u8;
pub fn VR_GetGenericInterface(name: *const u8, err: *mut HmdError) -> *const ();
}
"""
for d in data['typedefs']:
if parse_type(d['typedef']) == parse_type(d['type']):
continue
print "pub type %s = %s;" % (parse_type(d['typedef']), parse_type(d['type']))
for d in data['enums']:
found = set()
print "#[repr(C)]\n#[derive(Debug)]\npub enum %s {" % parse_type(d['enumname'])
for v in d['values']:
if v['value'] in found:
continue
found.add(v['value'])
print "\t%s = %s," % (shorten_enum(d['enumname'], v['name']), v['value'])
print "}\n"
for s in data['structs']:
if s['struct'] == "vr::(anonymous)":
continue
print "#[repr(C)]\npub struct %s {" % parse_type(s['struct'])
for f in s['fields']:
print "\t//%s" % f['fieldtype']
print "\tpub %s: %s," % (f['fieldname'], parse_type(f['fieldtype']))
print "}"
print "extern \"C\" {"
for m in data['methods']:
print '\tpub fn ' + parse_class(m['classname']) + '_' + m['methodname'] + "(ptr: *const (),",
s = []
for p in m.get('params', []):
if p['paramname'] == 'type':
p['paramname'] = '_type'
s += ["%s: %s" % (p['paramname'], parse_type(p['paramtype']))]
print "%s)" % (', '.join(s)),
if 'returntype' in m and m['returntype'] == 'void':
print ";"
elif 'returntype' in m:
print "-> %s;" % parse_type(m['returntype'])
else:
print ";"
print "}"

99
src/common.rs Normal file
View File

@ -0,0 +1,99 @@
use openvr_sys;
use openvr_sys::Enum_EVREye::*;
/// Width/height pair in pixels (render-target or viewport extent).
#[derive(Debug, Copy, Clone)]
pub struct Size {
    pub width: u32,
    pub height: u32
}

/// Signed 2D position in pixels (a window/viewport origin may be negative
/// on multi-monitor setups).
#[derive(Debug, Copy, Clone)]
pub struct Position {
    pub x: i32,
    pub y: i32
}

/// Axis-aligned rectangle: an origin plus an extent.
#[derive(Debug, Copy, Clone)]
pub struct Rectangle {
    pub position: Position,
    pub size: Size
}

/// Per-color-channel UV coordinates produced by the lens-distortion query;
/// one UV pair for each of red, green and blue.
#[derive(Debug, Copy, Clone)]
pub struct DistortionCoordinates {
    pub red: [f32; 2],
    pub green: [f32; 2],
    pub blue: [f32; 2],
}

/// Which eye of the HMD an operation refers to.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Eye {
    Left, Right
}
impl Eye {
    /// Translate this eye into the raw `openvr_sys::EVREye` value expected
    /// by the C API.
    pub fn to_raw(&self) -> openvr_sys::EVREye {
        match *self {
            Eye::Left => EVREye_Eye_Left,
            Eye::Right => EVREye_Eye_Right,
        }
    }
}
/// Normalized [0, 1] UV sub-rectangle of a texture to be presented for one
/// eye; mirrors `openvr_sys::VRTextureBounds_t`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextureBounds {
    pub u_min: f32,
    pub u_max: f32,
    pub v_min: f32,
    pub v_max: f32
}

impl TextureBounds {
    /// Convert these bounds into the raw FFI struct expected by the OpenVR
    /// C API. Field-for-field copy; no range validation is performed.
    fn to_raw(self) -> openvr_sys::VRTextureBounds_t {
        openvr_sys::VRTextureBounds_t{
            uMin: self.u_min,
            uMax: self.u_max,
            vMin: self.v_min,
            vMax: self.v_max
        }
    }
}
/// Pose sample for a single tracked device.
#[derive(Debug, Copy, Clone)]
pub struct TrackedDevicePose {
    /// 3x4 device-to-absolute-tracking transform matrix.
    pub to_device: [[f32; 4]; 3],
    /// Linear velocity — presumably meters/second; confirm against SDK docs.
    pub velocity: [f32; 3],
    /// Angular velocity — presumably radians/second; confirm against SDK docs.
    pub angular_velocity: [f32; 3],
    /// Whether the pose data itself is valid.
    pub is_valid: bool,
    /// Whether the device is currently connected.
    pub is_connected: bool,
}

/// Fixed-capacity buffer of up to 16 device poses; `count` is the number of
/// leading entries that are meaningful.
#[derive(Debug, Copy, Clone)]
pub struct TrackedDevicePoses {
    pub count: usize,
    pub poses: [TrackedDevicePose; 16],
}

impl TrackedDevicePoses {
    /// View of the populated prefix of the pose buffer.
    pub fn as_slice(&self) -> &[TrackedDevicePose] {
        &self.poses[..self.count]
    }
}
/// Translate a raw array of 16 `TrackedDevicePose_t` samples from the C API
/// into the safe `TrackedDevicePoses` wrapper.
///
/// `count` ends up one past the index of the last connected device, so
/// `as_slice()` covers every connected entry (disconnected gaps included).
/// The C booleans arrive as integers; any value > 0 is treated as true.
///
/// Unsafe because the output is conjured with `mem::zeroed` and the input is
/// expected to originate from the OpenVR runtime.
pub unsafe fn to_tracked(data: [openvr_sys::TrackedDevicePose_t; 16]) -> TrackedDevicePoses {
    use std;
    let mut out: TrackedDevicePoses = std::mem::zeroed();
    for (i, raw) in data.iter().enumerate() {
        let connected = raw.bDeviceIsConnected > 0;
        if connected {
            out.count = i + 1;
        }
        let pose = &mut out.poses[i];
        pose.is_connected = connected;
        pose.is_valid = raw.bPoseIsValid > 0;
        pose.to_device = raw.mDeviceToAbsoluteTracking.m;
        pose.velocity = raw.vVelocity.v;
        pose.angular_velocity = raw.vAngularVelocity.v;
    }
    out
}

69
src/compositor.rs Normal file
View File

@ -0,0 +1,69 @@
/// Wrapper over the `IVRCompositor` interface; holds the raw interface
/// pointer and forwards each call to the flat C binding.
///
/// NOTE(review): as shown, this file has no `use` lines yet it references
/// `openvr_sys`, `Eye`, `TextureBounds`, `to_tracked` and `std` — confirm
/// the real file imports these (it is also not declared as a module in the
/// visible lib.rs).
pub struct Compositor(*const ());

impl Compositor {
    /// Check to see if the compositor is fullscreen.
    pub fn is_fullscreen(&self) -> bool {
        unsafe { openvr_sys::VR_IVRCompositor_IsFullscreen(self.0) }
    }

    /// Query vsync information. Declared `Option<u64>` although the original
    /// comment said "check if vsync is enabled" — TODO confirm the binding's
    /// actual return type; the raw call result is returned untouched.
    pub fn get_vsync(&self) -> Option<u64> {
        unsafe { openvr_sys::VR_IVRCompositor_GetVSync(self.0) }
    }

    /// Set the vsync value.
    pub fn set_vsync(&self, v: bool) {
        unsafe { openvr_sys::VR_IVRCompositor_SetVSync(self.0, v) }
    }

    /// Ask the compositor whether the scene can currently be rendered.
    pub fn can_render_scene(&self) -> bool {
        unsafe { openvr_sys::VR_IVRCompositor_CanRenderScene(self.0) }
    }

    /// Get the gamma value.
    pub fn get_gamma(&self) -> f32 {
        unsafe { openvr_sys::VR_IVRCompositor_GetGamma(self.0) }
    }

    /// Set the gamma value.
    pub fn set_gamma(&self, v: f32) {
        unsafe { openvr_sys::VR_IVRCompositor_SetGamma(self.0, v) }
    }

    /// Submit an eye texture to the compositor for presentation.
    ///
    /// `texture` is an API-specific handle (an OpenGL texture name here,
    /// since `GraphicsAPIConvention::OpenGL` is passed) smuggled through a
    /// `usize`; the `transmute` reinterprets it as whatever handle type the
    /// binding expects — TODO confirm the two types are the same size on all
    /// supported targets.
    pub fn submit(&self, eye: Eye, texture: usize, bounds: TextureBounds) {
        let mut b = bounds.to_raw();
        let e = eye.to_raw();
        unsafe {
            use std::mem;
            let t = mem::transmute(texture);
            openvr_sys::VR_IVRCompositor_Submit(
                self.0,
                e,
                openvr_sys::GraphicsAPIConvention::OpenGL,
                t,
                &mut b as *mut openvr_sys::VRTextureBounds_t,
                openvr_sys::VRSubmitFlags_t::Default
            );
        }
    }

    /// Block until the runtime hands back fresh device poses. At most 16
    /// poses are requested and the secondary (game) pose array is not used
    /// (null pointer, length 0).
    pub fn wait_get_poses(&self) -> TrackedDevicePoses {
        unsafe {
            let mut data: [openvr_sys::TrackedDevicePose_t; 16] = std::mem::zeroed();
            openvr_sys::VR_IVRCompositor_WaitGetPoses(
                self.0,
                &mut data[0],
                16,
                std::ptr::null_mut(),
                0
            );
            to_tracked(data)
        }
    }
}

57
src/extended_display.rs Normal file
View File

@ -0,0 +1,57 @@
use openvr_sys;
use common::*;
/// Wrapper over the `IVRExtendedDisplay` interface. The stored pointer is a
/// pointer to the interface's FnTable (callers in lib.rs request
/// "FnTable:IVRExtendedDisplay_001" from `VR_GetGenericInterface`).
pub struct IVRExtendedDisplay(*const ());

impl IVRExtendedDisplay {
    /// Wrap a raw FnTable pointer.
    ///
    /// Unsafe contract: `ptr` must be a valid
    /// `Struct_VR_IVRExtendedDisplay_FnTable` pointer that outlives the
    /// returned value. (The `as *mut ()` cast is redundant — it coerces
    /// straight back to `*const ()` — but is kept as written.)
    pub unsafe fn from_raw(ptr: *const ()) -> Self {
        IVRExtendedDisplay(ptr as *mut ())
    }

    /// Get the window bounds of the extended (direct-to-display) window.
    pub fn window_bounds(&self) -> Rectangle {
        unsafe {
            // Reinterpret the stored pointer as the generated C function
            // table and copy it out; each entry is an Option<fn>, hence the
            // unwrap (panics if the runtime left an entry null).
            let ext = * { self.0 as *mut openvr_sys::Struct_VR_IVRExtendedDisplay_FnTable };
            let mut size = Size{width: 0, height: 0};
            let mut pos = Position{x: 0, y: 0};
            ext.GetWindowBounds.unwrap()(
                &mut pos.x,
                &mut pos.y,
                &mut size.width,
                &mut size.height
            );
            Rectangle {
                position: pos,
                size: size
            }
        }
    }

    /// Get the output viewport rectangle for one eye.
    pub fn eye_viewport(&self, eye: Eye) -> Rectangle {
        use std::mem;
        unsafe {
            let ext = * { self.0 as *mut openvr_sys::Struct_VR_IVRExtendedDisplay_FnTable };
            let mut size = Size{width: 0, height: 0};
            let mut pos = Position{x: 0, y: 0};
            // The transmutes turn `&mut i32` into the raw pointer type the C
            // entry expects — presumably an unsigned pointer; TODO confirm
            // signedness against the generated binding.
            ext.GetEyeOutputViewport.unwrap()(
                eye.to_raw(),
                mem::transmute(&mut pos.x),
                mem::transmute(&mut pos.y),
                &mut size.width,
                &mut size.height
            );
            Rectangle {
                position: pos,
                size: size
            }
        }
    }
}

View File

@ -1,361 +1,89 @@
extern crate openvr_sys; extern crate openvr_sys;
use openvr_sys::Enum_EVRInitError::*;
use openvr_sys::Enum_EVRApplicationType::*;
pub struct IVRSystem(*const ()); pub mod common;
pub mod system;
pub mod extended_display;
#[derive(Debug, Copy, Clone)] pub use system::IVRSystem;
pub struct Size { pub use extended_display::IVRExtendedDisplay;
pub width: u32,
pub height: u32
}
#[derive(Debug, Copy, Clone)] pub use common::Eye;
pub struct Position {
pub x: i32,
pub y: i32
}
#[derive(Debug, Copy, Clone)] /// Inits the open vr interface and returns the system
pub struct Rectangle { pub fn init() -> Result<system::IVRSystem, openvr_sys::HmdError> {
pub position: Position, let mut err = EVRInitError_VRInitError_None;
pub size: Size let app_type = EVRApplicationType_VRApplication_Scene;
}
#[derive(Debug, Copy, Clone)] // try to initialize base vr eco
pub struct DistortionCoordinates { unsafe {
pub red: [f32; 2], openvr_sys::VR_InitInternal(&mut err, app_type);
pub green: [f32; 2], };
pub blue: [f32; 2],
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)] // check for errors
pub enum Eye { match err {
Left, Right EVRInitError_VRInitError_None => {
} // get system
let result = system();
match result {
impl Eye { Ok(sys) => {
/// Convert a eye to a HmdEye return Ok(sys);
fn to_raw(&self) -> openvr_sys::Hmd_Eye { },
match self { Err(err) => {
&Eye::Left => openvr_sys::Hmd_Eye::Left, return Err(err);
&Eye::Right => openvr_sys::Hmd_Eye::Right, }
}
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextureBounds {
pub u_min: f32,
pub u_max: f32,
pub v_min: f32,
pub v_max: f32
}
impl TextureBounds {
/// Convert a bounds to a openvr_bounds
fn to_raw(self) -> openvr_sys::VRTextureBounds_t {
openvr_sys::VRTextureBounds_t{
uMin: self.u_min,
uMax: self.u_max,
vMin: self.v_min,
vMax: self.v_max
}
}
}
#[derive(Debug, Copy, Clone)]
pub struct TrackedDevicePose {
pub to_device: [[f32; 4]; 3],
pub velocity: [f32; 3],
pub angular_velocity: [f32; 3],
pub is_valid: bool,
pub is_connected: bool,
}
#[derive(Debug, Copy, Clone)]
pub struct TrackedDevicePoses {
pub count: usize,
pub poses: [TrackedDevicePose; 16],
}
impl TrackedDevicePoses {
pub fn as_slice(&self) -> &[TrackedDevicePose] {
&self.poses[0..self.count]
}
}
unsafe fn to_tracked(data: [openvr_sys::TrackedDevicePose_t; 16]) -> TrackedDevicePoses {
let mut out: TrackedDevicePoses = std::mem::zeroed();
for (i, d) in data.iter().enumerate() {
if d.bDeviceIsConnected {
out.count = i + 1;
}
out.poses[i].is_connected = d.bDeviceIsConnected;
out.poses[i].is_valid = d.bPoseIsValid;
out.poses[i].to_device = d.mDeviceToAbsoluteTracking.m;
out.poses[i].velocity = d.vVelocity.v;
out.poses[i].angular_velocity = d.vAngularVelocity.v;
}
out
}
impl IVRSystem {
/// Initialize the IVR System
pub fn init() -> Result<IVRSystem, openvr_sys::HmdError> {
let mut err = openvr_sys::HmdError::None;
let ptr = unsafe {
openvr_sys::VR_Init(&mut err as *mut openvr_sys::HmdError)
};
if ptr.is_null() {
Err(err)
} else {
Ok(IVRSystem(ptr))
}
}
/// Get the window bounds
pub fn bounds(&self) -> Rectangle {
unsafe {
let mut size = Size{width: 0, height: 0};
let mut pos = Position{x: 0, y: 0};
openvr_sys::VR_IVRSystem_GetWindowBounds(
self.0,
&mut pos.x,
&mut pos.y,
&mut size.width,
&mut size.height
);
Rectangle {
position: pos,
size: size
} }
},
_ => {
return Err(err);
} }
} };
}
/// Get the recommended render target size /// Shutdowns all openvr related systems
pub fn recommended_render_target_size(&self) -> Size { pub fn shutdown() {
unsafe { unsafe {
let mut size = Size{width: 0, height: 0}; openvr_sys::VR_ShutdownInternal();
openvr_sys::VR_IVRSystem_GetRecommendedRenderTargetSize(
self.0,
&mut size.width,
&mut size.height
);
size
}
} }
}
/// Get eye viewport size /// gets the current vr system interface (initialization is required beforehand)
pub fn eye_viewport(&self, eye: Eye) -> Rectangle { pub fn system() -> Result<system::IVRSystem, openvr_sys::HmdError> {
use std::mem; let mut err = EVRInitError_VRInitError_None;
unsafe { let name = std::ffi::CString::new("FnTable:IVRSystem_012").unwrap();
let mut size = Size{width: 0, height: 0}; let ptr = unsafe {
let mut pos = Position{x: 0, y: 0}; openvr_sys::VR_GetGenericInterface(name.as_ptr(), &mut err)
openvr_sys::VR_IVRSystem_GetEyeOutputViewport( };
self.0,
eye.to_raw(), match err {
mem::transmute(&mut pos.x), EVRInitError_VRInitError_None => {
mem::transmute(&mut pos.y), unsafe {
&mut size.width, return Ok(IVRSystem::from_raw(ptr as *const ()));
&mut size.height
);
Rectangle {
position: pos,
size: size
} }
} },
} _ => {
return Err(err);
/// Get the projection matrix for an eye
/// supply the near and the far position
/// assumes opengl conventions
pub fn projection_matrix(&self, eye: Eye, near: f32, far: f32) -> [[f32; 4]; 4] {
unsafe {
let mat = openvr_sys::VR_IVRSystem_GetProjectionMatrix(
self.0,
eye.to_raw(),
near,
far,
openvr_sys::GraphicsAPIConvention::OpenGL
);
mat.m
}
}
/// Computes the distortion caused by the optics
pub fn compute_distortion(&self, eye: Eye, u: f32, v: f32) -> DistortionCoordinates {
unsafe {
let coord = openvr_sys::VR_IVRSystem_ComputeDistortion(
self.0,
eye.to_raw(),
u, v
);
DistortionCoordinates {
red: coord.rfRed,
blue: coord.rfBlue,
green: coord.rfGreen
}
}
}
/// Computes the distortion caused by the optics
pub fn eye_to_head_transform(&self, eye: Eye) -> [[f32; 4]; 3] {
unsafe {
let mat = openvr_sys::VR_IVRSystem_GetEyeToHeadTransform(
self.0,
eye.to_raw(),
);
mat.m
}
}
/// Computes the distortion caused by the optics
pub fn time_since_last_vsync(&self) -> Option<(f32, u64)> {
unsafe {
let mut frame = 0;
let mut sync = 0.;
let found = openvr_sys::VR_IVRSystem_GetTimeSinceLastVsync(
self.0,
&mut sync,
&mut frame
);
if found {
Some((sync, frame))
} else {
None
}
}
}
/// Fetch the tracked results from the HMD
pub fn tracked_devices(&self, time: f32) -> TrackedDevicePoses {
unsafe {
let mut data: [openvr_sys::TrackedDevicePose_t; 16] = std::mem::zeroed();
openvr_sys::VR_IVRSystem_GetDeviceToAbsoluteTrackingPose(
self.0,
openvr_sys::TrackingUniverseOrigin::TrackingUniverseSeated,
time,
&mut data[0],
16
);
to_tracked(data)
}
}
/// If the device supports a compositor return a instance
pub fn compositor(&self) -> Result<Compositor, openvr_sys::HmdError> {
unsafe {
let mut err = openvr_sys::HmdError::None;
let name = std::ffi::CString::new("IVRCompositor_006").unwrap();
let ptr = openvr_sys::VR_GetGenericInterface(name.as_ptr() as *const u8, &mut err as *mut openvr_sys::HmdError);
match err {
openvr_sys::HmdError::None => Ok(Compositor(ptr)),
err => Err(err)
}
}
}
/// get frequency of hmd in hz
pub fn display_frequency(&self) -> f32 {
unsafe {
openvr_sys::VR_IVRSystem_GetFloatTrackedDeviceProperty(
self.0,
0,
openvr_sys::TrackedDeviceProperty::DisplayFrequency_Float,
std::ptr::null_mut()
)
}
}
/// get the time vsync to phonts
pub fn vsync_to_photons(&self) -> f32 {
unsafe {
openvr_sys::VR_IVRSystem_GetFloatTrackedDeviceProperty(
self.0,
0,
openvr_sys::TrackedDeviceProperty::SecondsFromVsyncToPhotons_Float,
std::ptr::null_mut()
)
} }
} }
} }
impl Drop for IVRSystem { /// gets the current vr extended display interface (initialization is required beforehand)
fn drop(&mut self) { pub fn extended_display() -> Result<IVRExtendedDisplay, openvr_sys::HmdError> {
unsafe { let mut err = EVRInitError_VRInitError_None;
println!("Trying to shutdown openvr"); let name = std::ffi::CString::new("FnTable:IVRExtendedDisplay_001").unwrap();
openvr_sys::VR_Shutdown(); let ptr = unsafe {
println!("Should be done now."); openvr_sys::VR_GetGenericInterface(name.as_ptr(), &mut err)
} };
}
} match err {
EVRInitError_VRInitError_None => {
/// A VR compositor unsafe {
pub struct Compositor(*const ()); return Ok(IVRExtendedDisplay::from_raw(ptr as *const ()));
}
impl Compositor { },
/// Check to see if the compositor is fullscreen _ => {
pub fn is_fullscreen(&self) -> bool { return Err(err);
unsafe { openvr_sys::VR_IVRCompositor_IsFullscreen(self.0) }
}
/// Check if vsync in enabled
pub fn get_vsync(&self) -> bool {
unsafe { openvr_sys::VR_IVRCompositor_GetVSync(self.0) }
}
/// Set the vsync value
pub fn set_vsync(&self, v: bool) {
unsafe { openvr_sys::VR_IVRCompositor_SetVSync(self.0, v) }
}
/// Check if vsync in enabled
pub fn can_render_scene(&self) -> bool {
unsafe { openvr_sys::VR_IVRCompositor_CanRenderScene(self.0) }
}
/// Get the gamma value
pub fn get_gamma(&self) -> f32 {
unsafe { openvr_sys::VR_IVRCompositor_GetGamma(self.0) }
}
/// Get the gamma value
pub fn set_gamma(&self, v: f32) {
unsafe { openvr_sys::VR_IVRCompositor_SetGamma(self.0, v) }
}
/// Submit an eye to the render
pub fn submit(&self, eye: Eye, texture: usize, bounds: TextureBounds) {
let mut b = bounds.to_raw();
let e = eye.to_raw();
unsafe {
use std::mem;
let t = mem::transmute(texture);
openvr_sys::VR_IVRCompositor_Submit(
self.0,
e,
openvr_sys::GraphicsAPIConvention::OpenGL,
t,
&mut b as *mut openvr_sys::VRTextureBounds_t,
openvr_sys::VRSubmitFlags_t::Default
);
}
}
/// Get the poses
pub fn wait_get_poses(&self) -> TrackedDevicePoses {
unsafe {
let mut data: [openvr_sys::TrackedDevicePose_t; 16] = std::mem::zeroed();
openvr_sys::VR_IVRCompositor_WaitGetPoses(
self.0,
&mut data[0],
16,
std::ptr::null_mut(),
0
);
to_tracked(data)
} }
} }
} }

File diff suppressed because it is too large Load Diff

108
src/system.rs Normal file
View File

@ -0,0 +1,108 @@
use openvr_sys;
use openvr_sys::Enum_EGraphicsAPIConvention::*;
use openvr_sys::Enum_ETrackingUniverseOrigin::*;
use common::*;
/// Wrapper over the `IVRSystem` interface. The stored pointer is a pointer
/// to the interface's FnTable (lib.rs requests "FnTable:IVRSystem_012").
pub struct IVRSystem(*const ());

impl IVRSystem {
    /// Wrap a raw FnTable pointer.
    ///
    /// Unsafe contract: `ptr` must be a valid `Struct_VR_IVRSystem_FnTable`
    /// pointer that stays alive while this value is used.
    pub unsafe fn from_raw(ptr: *const ()) -> Self {
        IVRSystem(ptr as *mut ())
    }

    /// Get the recommended render target size.
    pub fn recommended_render_target_size(&self) -> Size {
        unsafe {
            // Each method re-reads the FnTable from the raw pointer; every
            // entry is an Option<fn>, hence the unwraps below.
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let mut size = Size{width: 0, height: 0};
            system.GetRecommendedRenderTargetSize.unwrap()(
                &mut size.width,
                &mut size.height
            );
            size
        }
    }

    /// Get the projection matrix for an eye
    /// supply the near and the far position
    /// assumes opengl conventions
    pub fn projection_matrix(&self, eye: Eye, near: f32, far: f32) -> [[f32; 4]; 4] {
        unsafe {
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let mat = system.GetProjectionMatrix.unwrap()(
                eye.to_raw(),
                near,
                far,
                EGraphicsAPIConvention_API_OpenGL
            );
            mat.m
        }
    }

    /// Computes the distortion caused by the optics for one normalized
    /// (u, v) coordinate, returning per-channel UVs.
    pub fn compute_distortion(&self, eye: Eye, u: f32, v: f32) -> DistortionCoordinates {
        unsafe {
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let coord = system.ComputeDistortion.unwrap()(
                eye.to_raw(),
                u, v
            );
            DistortionCoordinates {
                red: coord.rfRed,
                blue: coord.rfBlue,
                green: coord.rfGreen
            }
        }
    }

    /// Eye-to-head transform (3x4 matrix) for the given eye.
    /// (Original comment was a copy-paste of compute_distortion's.)
    pub fn eye_to_head_transform(&self, eye: Eye) -> [[f32; 4]; 3] {
        unsafe {
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let mat = system.GetEyeToHeadTransform.unwrap()(
                eye.to_raw(),
            );
            mat.m
        }
    }

    /// Seconds since the last vsync plus the frame counter, or `None` when
    /// the C call reports no timing info (returns 0/false).
    pub fn time_since_last_vsync(&self) -> Option<(f32, u64)> {
        unsafe {
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let mut frame = 0;
            let mut sync = 0.;
            let found = system.GetTimeSinceLastVsync.unwrap()(
                &mut sync,
                &mut frame
            );
            if found > 0 {
                Some((sync, frame))
            } else {
                None
            }
        }
    }

    /// Fetch the tracked results from the HMD: up to 16 device poses in the
    /// seated tracking universe. `time` is forwarded to the C API —
    /// presumably the photon-prediction time in seconds; confirm against the
    /// SDK's GetDeviceToAbsoluteTrackingPose documentation.
    pub fn tracked_devices(&self, time: f32) -> TrackedDevicePoses {
        use std;
        unsafe {
            let system = * { self.0 as *mut openvr_sys::Struct_VR_IVRSystem_FnTable };
            let mut data: [openvr_sys::TrackedDevicePose_t; 16] = std::mem::zeroed();
            system.GetDeviceToAbsoluteTrackingPose.unwrap()(
                ETrackingUniverseOrigin_TrackingUniverseSeated,
                time,
                &mut data[0],
                16
            );
            to_tracked(data)
        }
    }
}