Update to 1.0.10, fix Travis (#31)

* Update to OpenVR 1.0.10

* Typo fix

* Missing doc comment

* Delete dead examples, cleanup

* Delete dead code

* Remove lifetimes from subsystems

OpenVR must be shut down manually, which invalidates outstanding
subsystem handles regardless of lifetimes, rendering the ergonomic
sacrifice pointless.

Future work: make shutdown safe by inserting checks before every
OpenVR call.
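For illustration, a minimal sketch of what such a check could look like, assuming subsystem handles were given shared access to the `live` flag that `Context` already keeps (hypothetical; not part of this commit):

```rust
use std::cell::Cell;
use std::rc::Rc;

// Hypothetical: a subsystem handle shares the context's liveness flag and
// asserts it before every FFI call, so use-after-shutdown panics instead of
// invoking undefined behavior.
struct System {
    live: Rc<Cell<bool>>,
}

impl System {
    fn recommended_render_target_size(&self) -> (u32, u32) {
        assert!(self.live.get(), "OpenVR has been shut down");
        // ... the actual FFI call would go here ...
        (0, 0)
    }
}
```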

* Depend on our own openvr-sys

* Update metadata

* Update readme

* More detailed safety notes

* Depend on released openvr-sys
Benjamin Saunders, 2017-10-08 14:56:32 -07:00, committed via GitHub
parent eb1f18a4ea, commit 2098bcc257
14 changed files with 87 additions and 823 deletions


@@ -13,12 +13,15 @@ documentation = "http://rust-openvr.github.io/rust-openvr/openvr/index.html"
homepage = "https://github.com/rust-openvr/rust-openvr"
repository = "https://github.com/rust-openvr/rust-openvr"
description = "A safe binding for openvr."
description = "A high-level binding for openvr."
keywords = ["vr", "vive", "steamvr"]
categories = [ "hardware-support", "api-bindings" ]
[badges]
travis-ci = { repository = "rust-openvr/rust-openvr" }
maintenance = { status = "passively-maintained" }
[dependencies]
openvr_sys = { git = "https://github.com/Ralith/rust-openvr-sys.git", branch = "enum-rename" }
openvr_sys = "2"
lazy_static = "0.2.8"
[dev_dependencies]
glium = "0.14.0"
nalgebra = "0.12"


@@ -4,11 +4,11 @@ rust-openvr
[![Build Status](https://travis-ci.org/rust-openvr/rust-openvr.svg?branch=master)](https://travis-ci.org/rust-openvr/rust-openvr)
[![Join the chat at https://gitter.im/rust-openvr/rust-openvr](https://badges.gitter.im/rust-openvr/rust-openvr.svg)](https://gitter.im/rust-openvr/rust-openvr?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
rust-openvr is a binding for openvr.
A high-level binding for OpenVR 1.0.10.
## [Link to the documentation](http://rust-openvr.github.io/rust-openvr/openvr/index.html)
## Current sdk version: OpenVR SDK 1.0.1
[API documentation](http://rust-openvr.github.io/rust-openvr/openvr/index.html)
High-level documentation can be found at [the OpenVR wiki](https://github.com/ValveSoftware/openvr/wiki/API-Documentation).
Using rust-openvr
-----------
@@ -17,11 +17,9 @@ Using rust-openvr
openvr-sys needs cmake and a C++ compiler so that it can compile and statically link the OpenVR client library.
# Building on Windows
## Windows
Rust provides 2 pre-compiled version for windows. MSVC ABI and GNU ABI. The proprietary OpenVR library which is loaded
behind the scenes by the client library is MSVC only, and therefore MSVC is required! For more informations about the
ABI in Rust see https://www.rust-lang.org/en-US/downloads.html#win-foot
Upstream OpenVR does not support MinGW. You must use an MSVC-targeted rust toolchain and C++ compiler.
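For example, rustup can select an MSVC-targeted toolchain:

```
rustup default stable-x86_64-pc-windows-msvc
```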
# Initializing
@@ -31,7 +29,7 @@ extern crate openvr;
fn main() {
// Initialize OpenVR
let context = openvr::init(openvr::ApplicationType::Scene).unwrap();
let context = unsafe { openvr::init(openvr::ApplicationType::Scene) }.unwrap();
// accessing subsystems
let system = context.system().unwrap();
@@ -41,5 +39,4 @@ fn main() {
```
# Examples
For data collection examples/test.rs can be used.
For an actual opengl implementation see examples/opengl.rs (WIP)
See examples/test.rs for a more detailed example.


@@ -1,61 +0,0 @@
extern crate openvr;
pub fn main () {
{
// init vr system
let system = match openvr::init() {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to create IVRSystem subsystem {:?}", err);
return;
}
};
// init camera subsystem
let camera = match openvr::subsystems::tracked_camera() {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to create IVRTrackedCamera subsystem {:?}", err);
return;
}
};
// look for tracked devices with a camera
let mut camera_device = None;
for device in system.tracked_devices(0.0).connected_iter() {
if camera.has_camera(&device).unwrap_or(false) {
println!("Tracked Device with camera found, ID: {}", device.index);
println!("\t{:?}", device.device_class());
println!("\t{:?}", camera.frame_size(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted));
println!("\t{:?}", camera.intrinisics(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted));
camera_device = Some(device.clone());
}
}
// make sure camera is available
if camera_device.is_none() {
println!("No tracked device with camera found. Exiting..");
openvr::shutdown();
return;
}
// create stream
let stream = camera.stream(&camera_device.unwrap()).unwrap_or_else(|err| {
println!("Could not start stream to camera: {}", err.message());
openvr::shutdown();
panic!("");
});
let frame = stream.read(openvr::tracked_camera::CameraFrameType::MaximumUndistorted).unwrap_or_else(|err| {
println!("Could not read from camera stream: {}", err.message());
openvr::shutdown();
panic!("");
});
println!("Frame Data recieved! {:?}", frame);
}
openvr::shutdown();
}


@@ -1,21 +0,0 @@
extern crate openvr;
extern crate nalgebra;
pub fn main () {
let system = openvr::init().unwrap();
let render_model = openvr::render_models().unwrap();
let _ = openvr::compositor().unwrap();
loop {
let _ = openvr::compositor().unwrap().wait_get_poses();
let raw = system.projection_matrix(openvr::Eye::Left, 0.1, 1000.0);
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[0][1], raw[0][2], raw[0][3],
raw[1][0], raw[1][1], raw[1][2], raw[1][3],
raw[2][0], raw[2][1], raw[2][2], raw[2][3],
raw[3][0], raw[3][1], raw[3][2], raw[3][3]);
println!("{:?}", mat);
}
}


@@ -1,299 +0,0 @@
extern crate openvr;
extern crate nalgebra;
#[macro_use]
extern crate glium;
use std::convert::From;
use nalgebra::Inverse;
use glium::framebuffer::ToColorAttachment;
use glium::framebuffer::ToDepthAttachment;
use glium::GlObject;
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 3],
normal: [f32; 3],
texcoord: [f32; 2]
}
implement_vertex!(Vertex, position, normal, texcoord);
pub fn main() {
{
// init vr system
let system = match openvr::init() {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to create IVR subsystem {:?}", err);
return;
}
};
// init render model subsystem
let models = match openvr::render_models() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRRenderModels subsystem {:?}", err);
return;
}
};
for device in system.tracked_devices(0.0).connected_iter() {
println!("device found :) -> {}",
device.get_property_string(openvr::tracking::TrackedDeviceStringProperty::RenderModelName).unwrap_or_else(|_| { panic!("No render model")} ));
println!("\t{:?}", device);
println!("\t{:?}", device.device_class());
}
// init compositor subsystem
let comp = match openvr::compositor() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRCompositor subsystem {:?}", err);
return;
}
};
// create glium window and context
use glium::{DisplayBuild, Surface};
let display = glium::glutin::WindowBuilder::new()
.with_depth_buffer(24)
.build_glium()
.unwrap();
// create frame buffer for hmd
let texture_size = system.recommended_render_target_size();
let left_eye_depth = glium::framebuffer::DepthRenderBuffer::new(
&display,
glium::texture::DepthFormat::I24,
texture_size.width,
texture_size.height).unwrap();
let left_eye_texture = glium::framebuffer::RenderBuffer::new(
&display,
glium::texture::UncompressedFloatFormat::U8U8U8U8,
texture_size.width,
texture_size.height).unwrap();
let mut left_eye_framebuffer = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer
(
&display, left_eye_texture.to_color_attachment(), left_eye_depth.to_depth_attachment()
).unwrap();
let right_eye_depth = glium::framebuffer::DepthRenderBuffer::new(
&display,
glium::texture::DepthFormat::I24,
texture_size.width,
texture_size.height).unwrap();
let right_eye_texture = glium::framebuffer::RenderBuffer::new(
&display,
glium::texture::UncompressedFloatFormat::U8U8U8U8,
texture_size.width,
texture_size.height).unwrap();
let mut right_eye_framebuffer = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer
(
&display, right_eye_texture.to_color_attachment(), right_eye_depth.to_depth_attachment()
).unwrap();
// prepare shader
let vertex_shader_src = r#"
#version 140
in vec3 position;
in vec3 normal;
in vec2 texcoord;
out vec3 v_normal;
out vec2 v_texcoord;
uniform mat4 matrix;
void main() {
v_normal = normal;
v_texcoord = texcoord;
gl_Position = matrix * vec4(position, 1.0);
}
"#;
let fragment_shader_src = r#"
#version 140
uniform sampler2D diffuse;
in vec3 v_normal;
in vec2 v_texcoord;
out vec4 color;
void main() {
color = texture(diffuse, v_texcoord);
}
"#;
let program = glium::Program::from_source(&display, vertex_shader_src, fragment_shader_src, None).unwrap();
// load controller models
let controller = models.load(String::from("lh_basestation_vive")).unwrap_or_else(|err| {
openvr::shutdown(); panic!("controller render model not found: {:?}", err) });
let mut controller_vertices: Vec<Vertex> = Vec::new();
let mut controller_indices: Vec<u16> = Vec::new();
for vertex in controller.vertex_iter() {
controller_vertices.push(Vertex {
position: [vertex.vPosition.v[0] as f32, vertex.vPosition.v[1] as f32, vertex.vPosition.v[2] as f32],
normal: [vertex.vNormal.v[0] as f32, vertex.vNormal.v[1] as f32, vertex.vNormal.v[2] as f32],
texcoord: [vertex.rfTextureCoord[0] as f32, vertex.rfTextureCoord[1] as f32],
});
}
for index in controller.index_iter() {
controller_indices.push(*index);
}
let controller_vertex_buffer = glium::VertexBuffer::new(&display, &controller_vertices).unwrap();
let controller_index_buffer = glium::IndexBuffer::new(&display, glium::index::PrimitiveType::TrianglesList, &controller_indices).unwrap();
let controller_texture_response = controller.load_texture().unwrap();
let dimension = (controller_texture_response.dimension().0 as u32, controller_texture_response.dimension().1 as u32);
let image = glium::texture::RawImage2d::from_raw_rgba(controller_texture_response.to_vec(), dimension);
let controller_texture = glium::texture::Texture2d::new(&display, image).unwrap();
// get static jmatrices
let left_projection = {
let raw = system.projection_matrix(openvr::Eye::Left, 0.01, 1000.0);
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[0][1], raw[0][2], raw[0][3],
raw[1][0], raw[1][1], raw[1][2], raw[1][3],
raw[2][0], raw[2][1], raw[2][2], raw[2][3],
raw[3][0], raw[3][1], raw[3][2], raw[3][3]);
mat
};
let left_eye_transform = {
let raw = system.eye_to_head_transform(openvr::Eye::Left);
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[1][0], raw[2][0], 0.0,
raw[0][1], raw[1][1], raw[2][1], 0.0,
raw[0][2], raw[1][2], raw[2][2], 0.0,
raw[0][3], raw[1][3], raw[2][3], 1.0);
mat.inverse().unwrap()
};
let right_projection = {
let raw = system.projection_matrix(openvr::Eye::Right, 0.01, 1000.0);
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[0][1], raw[0][2], raw[0][3],
raw[1][0], raw[1][1], raw[1][2], raw[1][3],
raw[2][0], raw[2][1], raw[2][2], raw[2][3],
raw[3][0], raw[3][1], raw[3][2], raw[3][3]);
mat
};
let right_eye_transform = {
let raw = system.eye_to_head_transform(openvr::Eye::Right);
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[1][0], raw[2][0], 0.0,
raw[0][1], raw[1][1], raw[2][1], 0.0,
raw[0][2], raw[1][2], raw[2][2], 0.0,
raw[0][3], raw[1][3], raw[2][3], 1.0);
mat.inverse().unwrap()
};
'render: loop {
// this is important to make sure frames are synced correctly
let tracked_devices = comp.wait_get_poses();
let mut left_matrix = left_projection * left_eye_transform;
let mut right_matrix = right_projection * right_eye_transform;
let mut once = false;
for device in tracked_devices.connected_iter() {
match device.device_class() {
openvr::tracking::TrackedDeviceClass::HMD => {
let matrix = {
let raw = device.to_device;
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[0][1], raw[0][2], raw[0][3],
raw[1][0], raw[1][1], raw[1][2], raw[1][3],
raw[2][0], raw[2][1], raw[2][2], raw[2][3],
0.0, 0.0, 0.0, 1.0);
mat.inverse().unwrap()
};
left_matrix *= matrix;
right_matrix *= matrix;
},
openvr::tracking::TrackedDeviceClass::TrackingReference => {
if once { continue; }
once = true;
let matrix = {
let raw = device.to_device;
let mat = nalgebra::Matrix4::new(
raw[0][0], raw[0][1], raw[0][2], raw[0][3],
raw[1][0], raw[1][1], raw[1][2], raw[1][3],
raw[2][0], raw[2][1], raw[2][2], raw[2][3],
0.0, 0.0, 0.0, 1.0);
mat
};
left_matrix *= matrix;
right_matrix *= matrix;
},
_ => { }
};
}
let mut target = display.draw();
target.clear_color_and_depth((0.0, 0.0, 1.0, 1.0), 1.0);
let left_uniforms = uniform! {
matrix: *left_matrix.as_ref(),
diffuse: &controller_texture
};
let right_uniforms = uniform! {
matrix: *right_matrix.as_ref(),
diffuse: &controller_texture
};
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::draw_parameters::DepthTest::IfLess,
write: true,
.. Default::default()
},
backface_culling: glium::draw_parameters::BackfaceCullingMode::CullClockwise,
.. Default::default()
};
// render 2d display output
target.draw(&controller_vertex_buffer, &controller_index_buffer, &program, &left_uniforms, &params).unwrap();
// render hmd eye outputs
left_eye_framebuffer.clear_color_and_depth((0.0, 0.0, 1.0, 1.0), 1.0);
right_eye_framebuffer.clear_color_and_depth((0.0, 0.0, 1.0, 1.0), 1.0);
left_eye_framebuffer.draw(&controller_vertex_buffer, &controller_index_buffer, &program, &left_uniforms, &params).unwrap();
right_eye_framebuffer.draw(&controller_vertex_buffer, &controller_index_buffer, &program, &right_uniforms, &params).unwrap();
// finish all rendering
target.finish().unwrap();
// submit to hmd
comp.submit(openvr::Eye::Left, left_eye_texture.get_id() as usize, openvr::common::TextureBounds::new((0.0, 1.0), (0.0, 1.0)));
comp.submit(openvr::Eye::Right, right_eye_texture.get_id() as usize, openvr::common::TextureBounds::new((0.0, 1.0), (0.0, 1.0)));
// handle window events
for ev in display.poll_events() {
match ev {
glium::glutin::Event::Closed => break 'render, // the window has been closed by the user
_ => ()
}
}
}
}
// free openvr
openvr::shutdown();
}


@@ -19,7 +19,7 @@ fn main() {
let context = match unsafe { openvr::init(openvr::ApplicationType::Other) } {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to initialize openvr {:?}", err);
println!("Failed to initialize openvr: {}", err);
return;
}
};
@@ -29,7 +29,7 @@
let system = match context.system() {
Ok(sys) => sys,
Err(err) => {
println!("Failed to get system interface {:?}", err);
println!("Failed to get system interface: {}", err);
return;
}
};
@@ -42,7 +42,7 @@
print!("\tProjection matrix right ");
print_matrix_4x4(31, system.projection_matrix(openvr::Eye::Right, 0.1, 100.));
print!("\tEye_to_head ");
print!("\tEye to head left ");
print_matrix_4x3(8+12, system.eye_to_head_transform(openvr::Eye::Left));
print!("\tPoses ");
@@ -65,49 +65,20 @@ fn main() {
println!("");
}
/*
let ext = match context.extended_display() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRExtendedDisplay subsystem {:?}", err);
return;
}
};
println!("\nIVRExtendedDisplay was created");
println!("\tBounds: {:?}", ext.window_bounds());
println!("\tEye output: {:?} {:?}", ext.eye_viewport(openvr::Eye::Left), ext.eye_viewport(openvr::Eye::Right));
*/
let comp = match context.compositor() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRCompositor subsystem {:?}", err);
println!("Failed to create IVRCompositor subsystem: {}", err);
return;
}
};
println!("\nIVRCompositor was created");
println!("\tIs fullscreen = {}", comp.is_fullscreen());
println!("\tInstance Extensions:");
println!("\tVulkan Instance Extensions:");
for ext in comp.vulkan_instance_extensions_required() {
println!("\t\t{:?}", ext);
}
/*
let model = match context.render_models() {
Ok(ext) => ext,
Err(err) => {
println!("Failed to create IVRRenderModels subsystem {:?}", err);
return;
}
};
println!("\nIVRRenderModels was created\n Count: {}", model.get_count());
for i in 0..model.get_count() {
println!("\t{}", model.get_name(i));
}
*/
println!("Done! \\o/");
}


@@ -1,80 +0,0 @@
use openvr_sys;
use openvr_sys::EVREye::*;
#[derive(Debug, Copy, Clone)]
pub struct Size {
pub width: u32,
pub height: u32
}
#[derive(Debug, Copy, Clone)]
pub struct Position {
pub x: i32,
pub y: i32
}
#[derive(Debug, Copy, Clone)]
pub struct Rectangle {
pub position: Position,
pub size: Size
}
#[derive(Debug, Copy, Clone)]
pub struct DistortionCoordinates {
pub red: [f32; 2],
pub green: [f32; 2],
pub blue: [f32; 2],
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Eye {
Left, Right
}
impl Eye {
/// Convert a eye to a HmdEye
pub fn to_raw(&self) -> openvr_sys::EVREye {
match self {
&Eye::Left => EVREye_Eye_Left,
&Eye::Right => EVREye_Eye_Right,
}
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextureBounds {
pub u_min: f32,
pub u_max: f32,
pub v_min: f32,
pub v_max: f32
}
impl TextureBounds {
pub fn new(u: (f32, f32), v: (f32, f32)) -> Self {
TextureBounds {
u_min: u.0,
u_max: u.1,
v_min: v.0,
v_max: v.1
}
}
pub fn full() -> Self {
TextureBounds {
u_min: 0.0,
u_max: 1.0,
v_min: 0.0,
v_max: 1.0
}
}
/// Convert a bounds to a openvr_bounds
pub fn to_raw(self) -> openvr_sys::VRTextureBounds_t {
openvr_sys::VRTextureBounds_t {
uMin: self.u_min,
uMax: self.u_max,
vMin: self.v_min,
vMax: self.v_max
}
}
}


@@ -19,7 +19,7 @@ pub use self::texture::Texture;
use super::*;
impl<'a> Compositor<'a> {
impl Compositor {
pub fn vulkan_instance_extensions_required(&self) -> Vec<CString> {
let temp = unsafe { get_string(|ptr, n| self.0.GetVulkanInstanceExtensionsRequired.unwrap()(ptr, n)) }.unwrap();
temp.as_bytes().split(|&x| x == b' ').map(|x| CString::new(x.to_vec()).expect("extension name contained null byte")).collect()
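The returned `CString`s are typically forwarded to Vulkan instance creation. A hedged sketch of that plumbing (the `VkInstanceCreateInfo` wiring itself is left to whichever Vulkan binding the application uses):

```rust
use std::ffi::CString;
use std::os::raw::c_char;

// One CString per required extension; Vulkan bindings generally want a
// *const c_char array for ppEnabledExtensionNames.
fn extension_ptrs(exts: &[CString]) -> Vec<*const c_char> {
    exts.iter().map(|e| e.as_ptr()).collect()
}

// let exts = compositor.vulkan_instance_extensions_required();
// let ptrs = extension_ptrs(&exts); // keep `exts` alive while `ptrs` is in use
```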
@@ -60,14 +60,14 @@ impl<'a> Compositor<'a> {
/// # Safety
///
/// The handles you supply must be valid and comply with the graphics API's synchronization requirements.
pub unsafe fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&texture::Bounds>) -> Result<(), CompositorError> {
pub unsafe fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&texture::Bounds>, pose: Option<[[f32; 4]; 3]>) -> Result<(), CompositorError> {
use self::texture::Handle::*;
let flags = match texture.handle {
Vulkan(_) => sys::EVRSubmitFlags_Submit_Default,
OpenGLTexture(_) => sys::EVRSubmitFlags_Submit_Default,
OpenGLRenderBuffer(_) => sys::EVRSubmitFlags_Submit_GlRenderBuffer,
};
let texture = sys::Texture_t {
} | if pose.is_some() { sys::EVRSubmitFlags_Submit_TextureWithPose } else { 0 };
let texture = sys::VRTextureWithPose_t_real {
handle: match texture.handle {
Vulkan(ref x) => x as *const _ as *mut _,
OpenGLTexture(x) => x as *mut _,
@@ -79,6 +79,7 @@ impl<'a> Compositor<'a> {
OpenGLRenderBuffer(_) => sys::ETextureType_TextureType_OpenGL,
},
eColorSpace: texture.color_space as sys::EColorSpace,
mDeviceToAbsoluteTracking: sys::HmdMatrix34_t { m: pose.unwrap_or([[0.0; 4]; 3]) },
};
let e = self.0.Submit.unwrap()(
eye as sys::EVREye,
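A hedged usage sketch of the new signature; the module paths and the `ColorSpace` variant are assumptions, and the caller must uphold `submit`'s safety contract for the texture handle:

```rust
use openvr::compositor::texture::{ColorSpace, Handle, Texture};

unsafe fn present_left_eye(
    compositor: &openvr::Compositor,
    gl_tex_id: usize,
    pose: Option<[[f32; 4]; 3]>, // 3x4 device-to-absolute transform, or None
) -> Result<(), openvr::compositor::CompositorError> {
    let texture = Texture {
        handle: Handle::OpenGLTexture(gl_tex_id),
        color_space: ColorSpace::Gamma, // variant name assumed
    };
    // Passing Some(pose) sets Submit_TextureWithPose, per the flag logic above.
    compositor.submit(openvr::Eye::Left, &texture, None, pose)
}
```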
@@ -115,6 +116,39 @@ impl<'a> Compositor<'a> {
pub fn clear_last_submitted_frame(&self) {
unsafe { self.0.ClearLastSubmittedFrame.unwrap()() }
}
/// Controls whether the application should flag the time at which the frame begins explicitly
///
/// *Vulkan/D3D12 Only*
/// There are two purposes for SetExplicitTimingMode:
/// 1. To get a more accurate GPU timestamp for when the frame begins in Vulkan/D3D12 applications.
/// 2. (Optional) To avoid having WaitGetPoses access the Vulkan queue so that the queue can be accessed from
/// another thread while WaitGetPoses is executing.
///
/// More accurate GPU timestamp for the start of the frame is achieved by the application calling
/// SubmitExplicitTimingData immediately before its first submission to the Vulkan/D3D12 queue. This is more
/// accurate because normally this GPU timestamp is recorded during WaitGetPoses. In D3D11, WaitGetPoses queues a
/// GPU timestamp write, but it does not actually get submitted to the GPU until the application flushes. By using
/// SubmitExplicitTimingData, the timestamp is recorded at the same place for Vulkan/D3D12 as it is for D3D11,
/// resulting in a more accurate GPU time measurement for the frame.
///
/// Avoiding WaitGetPoses accessing the Vulkan queue can be achieved using SetExplicitTimingMode as well. If this
/// is desired, the application *MUST* call PostPresentHandoff itself prior to WaitGetPoses. If
/// SetExplicitTimingMode is true and the application calls PostPresentHandoff, then WaitGetPoses is guaranteed not
/// to access the queue. Note that PostPresentHandoff and SubmitExplicitTimingData will access the queue, so only
/// WaitGetPoses becomes safe for accessing the queue from another thread.
pub fn set_explicit_timing_mode(&self, mode: bool) {
unsafe { self.0.SetExplicitTimingMode.unwrap()(mode) }
}
pub fn submit_explicit_timing_data(&self) -> Result<(), CompositorError> {
let e = unsafe { self.0.SubmitExplicitTimingData.unwrap()() };
if e == sys::EVRCompositorError_VRCompositorError_None {
Ok(())
} else {
Err(CompositorError(e))
}
}
}
#[derive(Debug, Copy, Clone)]
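Taken together, the explicit-timing documentation implies a frame-loop ordering like the following hedged sketch for a Vulkan application (the commented calls are placeholders for methods not shown in this diff):

```rust
fn frame(compositor: &openvr::Compositor) -> Result<(), openvr::compositor::CompositorError> {
    // Once at startup: compositor.set_explicit_timing_mode(true);

    // Optional, to keep WaitGetPoses off the Vulkan queue: hand off the
    // previous frame first, e.g. compositor.post_present_handoff() (name assumed).

    // compositor.wait_get_poses()?; // obtain poses for this frame

    // Immediately before the first Vulkan/D3D12 queue submission of the frame:
    compositor.submit_explicit_timing_data()?;

    // ... record and submit command buffers, then submit() each eye ...
    Ok(())
}
```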


@@ -1,57 +0,0 @@
use openvr_sys;
use common::*;
pub struct IVRExtendedDisplay(*const ());
impl IVRExtendedDisplay {
pub unsafe fn from_raw(ptr: *const ()) -> Self {
IVRExtendedDisplay(ptr as *mut ())
}
/// Get the window bounds
pub fn window_bounds(&self) -> Rectangle {
unsafe {
let ext = * { self.0 as *mut openvr_sys::VR_IVRExtendedDisplay_FnTable };
let mut size = Size{width: 0, height: 0};
let mut pos = Position{x: 0, y: 0};
ext.GetWindowBounds.unwrap()(
&mut pos.x,
&mut pos.y,
&mut size.width,
&mut size.height
);
Rectangle {
position: pos,
size: size
}
}
}
/// Get eye viewport size
pub fn eye_viewport(&self, eye: Eye) -> Rectangle {
use std::mem;
unsafe {
let ext = * { self.0 as *mut openvr_sys::VR_IVRExtendedDisplay_FnTable };
let mut size = Size{width: 0, height: 0};
let mut pos = Position{x: 0, y: 0};
ext.GetEyeOutputViewport.unwrap()(
eye.to_raw(),
mem::transmute(&mut pos.x),
mem::transmute(&mut pos.y),
&mut size.width,
&mut size.height
);
Rectangle {
position: pos,
size: size
}
}
}
}


@@ -33,7 +33,8 @@ static INITIALIZED: AtomicBool = ATOMIC_BOOL_INIT;
///
/// # Safety
///
/// The `Context` MUST be dropped or shut down with `Context::shutdown` before shutting down the graphics API.
/// The `Context` MUST be dropped or shut down with `Context::shutdown` before shutting down the graphics API. No OpenVR
/// calls may be made on objects derived from a `Context` after the `Context` has been dropped or explicitly shut down.
pub unsafe fn init(ty: ApplicationType) -> Result<Context, InitError> {
if INITIALIZED.swap(true, Ordering::Acquire) {
panic!("OpenVR has already been initialized!");
@@ -51,13 +52,15 @@ pub unsafe fn init(ty: ApplicationType) -> Result<Context, InitError> {
Ok(Context { live: Cell::new(true) })
}
pub struct System<'a>(&'a sys::VR_IVRSystem_FnTable);
pub struct Compositor<'a>(&'a sys::VR_IVRCompositor_FnTable);
pub struct RenderModels<'a>(&'a sys::VR_IVRRenderModels_FnTable);
pub struct System(&'static sys::VR_IVRSystem_FnTable);
pub struct Compositor(&'static sys::VR_IVRCompositor_FnTable);
pub struct RenderModels(&'static sys::VR_IVRRenderModels_FnTable);
/// Entry points into OpenVR.
///
/// At most one instance of this object may exist at a time.
///
/// See safety notes in `init`.
pub struct Context { live: Cell<bool> }
fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {
@@ -86,12 +89,15 @@ impl Drop for Context {
impl Context {
/// Shut down OpenVR. Repeated calls are safe.
///
/// Called implicitly by `Context::drop`. This MUST be called before shutting down the graphics API, or OpenVR may
/// invoke undefined behavior.
/// Called implicitly by `Context::drop`.
///
/// # Safety
///
/// No OpenVR calls may be made after this has been called unless a new `Context` is subsequently constructed.
/// This *must* be called *before* shutting down the graphics API, or OpenVR may invoke undefined behavior by
/// attempting to free graphics resources.
///
/// No calls to other OpenVR methods may be made after this has been called unless a new `Context` is first
/// constructed.
pub unsafe fn shutdown(&self) {
if self.live.replace(false) {
sys::VR_ShutdownInternal();
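Putting the safety notes together, a hedged sketch of the intended lifecycle (graphics teardown is a placeholder):

```rust
extern crate openvr;

fn main() {
    // Safety: OpenVR is shut down before the graphics API below, and no
    // OpenVR calls are made afterwards.
    let context = unsafe { openvr::init(openvr::ApplicationType::Scene) }.unwrap();
    let _system = context.system().unwrap();

    // ... render loop ...

    unsafe { context.shutdown() }; // or simply drop(context)
    // graphics API teardown may only happen after this point
}
```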


@@ -5,7 +5,7 @@ use openvr_sys as sys;
use {RenderModels, ControllerState, get_string};
impl<'a> RenderModels<'a> {
impl RenderModels {
/// Loads and returns a render model for use in the application. `name` should be a render model name from the
/// `RenderModelName_String` property or an absolute path name to a render model on disk.
///


@@ -34,8 +34,7 @@ pub struct Controller {
impl FromEventData for Controller {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.controller.as_ref();
Controller { button: x.button }
Controller { button: x.controller.button }
}
}
@@ -50,8 +49,7 @@ pub struct Mouse {
impl FromEventData for Mouse {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.mouse.as_ref();
Mouse { position: (x.x, x.y), button: x.button }
Mouse { position: (x.mouse.x, x.mouse.y), button: x.mouse.button }
}
}
@@ -67,8 +65,7 @@ pub struct Scroll {
impl FromEventData for Scroll {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.scroll.as_ref();
Scroll { delta: (x.xdelta, x.ydelta), repeat_count: x.repeatCount }
Scroll { delta: (x.scroll.xdelta, x.scroll.ydelta), repeat_count: x.scroll.repeatCount }
}
}
@@ -88,10 +85,9 @@ pub struct TouchPadMove {
impl FromEventData for TouchPadMove {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.touchPadMove.as_ref();
TouchPadMove { finger_down: x.bFingerDown, seconds_finger_down: x.flSecondsFingerDown,
first: (x.fValueXFirst, x.fValueYFirst),
raw: (x.fValueXRaw, x.fValueYRaw) }
TouchPadMove { finger_down: x.touchPadMove.bFingerDown, seconds_finger_down: x.touchPadMove.flSecondsFingerDown,
first: (x.touchPadMove.fValueXFirst, x.touchPadMove.fValueYFirst),
raw: (x.touchPadMove.fValueXRaw, x.touchPadMove.fValueYRaw) }
}
}
@@ -111,8 +107,7 @@ pub struct Process {
impl FromEventData for Process {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.process.as_ref();
Process { pid: x.pid, old_pid: x.oldPid, forced: x.bForced }
Process { pid: x.process.pid, old_pid: x.process.oldPid, forced: x.process.bForced }
}
}
@@ -123,8 +118,7 @@ pub struct Overlay {
impl FromEventData for Overlay {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.overlay.as_ref();
Overlay { overlay_handle: x.overlayHandle }
Overlay { overlay_handle: x.overlay.overlayHandle }
}
}
@@ -141,7 +135,7 @@ pub struct Keyboard {
impl FromEventData for Keyboard {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = &*(x.keyboard.as_ref() as *const _ as *const sys::VREvent_Keyboard_t_real);
let x = &*(&x.keyboard as *const _ as *const sys::VREvent_Keyboard_t_real);
Keyboard { new_input: *(x.cNewInput.as_ptr() as *const _), user_value: x.uUserValue }
}
}
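The pattern behind these fixes, sketched on hypothetical stand-ins for the bindgen-generated types in `openvr_sys`: pick the union field that the event's type tag says is valid and read it directly, rather than going through an `as_ref()` wrapper:

```rust
#[repr(C)]
#[derive(Copy, Clone)]
struct ControllerData { button: u32 }

#[repr(C)]
#[derive(Copy, Clone)]
struct MouseData { x: f32, y: f32, button: u32 }

// Untagged union, as bindgen produces for VREvent_Data_t.
#[repr(C)]
union EventData {
    controller: ControllerData,
    mouse: MouseData,
}

unsafe fn controller_button(data: &EventData) -> u32 {
    // Caller must know a controller event is stored; reading the wrong
    // field is undefined behavior, which is why these accessors are unsafe.
    data.controller.button
}
```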


@@ -13,7 +13,7 @@ use super::*;
pub use self::event::{Event, EventInfo};
impl<'a> System<'a> {
impl System {
/// Provides the game with the minimum size that it should use for its offscreen render target to minimize pixel
/// stretching. This size is matched with the projection matrix and distortion function and will change from display
/// to display depending on resolution, distortion, and field of view.
@@ -155,10 +155,10 @@ impl<'a> System<'a> {
}
}
pub fn vulkan_output_device(&self) -> Option<*mut VkPhysicalDevice_T> {
pub fn vulkan_output_device(&self, instance: *mut VkInstance_T) -> Option<*mut VkPhysicalDevice_T> {
unsafe {
let mut device = mem::uninitialized();
self.0.GetOutputDevice.unwrap()(&mut device, sys::ETextureType_TextureType_Vulkan);
self.0.GetOutputDevice.unwrap()(&mut device, sys::ETextureType_TextureType_Vulkan, instance);
if device == 0 { None } else { Some(device as usize as *mut _) }
}
}
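A hedged sketch of the updated call; the raw Vulkan handle types are assumed to be the bindgen-generated opaque types in `openvr_sys`, and the instance must be the one the application created:

```rust
fn hmd_physical_device(
    system: &openvr::System,
    instance: *mut openvr_sys::VkInstance_T,
) -> Option<*mut openvr_sys::VkPhysicalDevice_T> {
    // Returns None when no Vulkan output device is available.
    system.vulkan_output_device(instance)
}
```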
@@ -247,6 +247,7 @@ impl<'a> System<'a> {
}
}
/// See `controller_state`
pub fn controller_state_with_pose(&self, origin: TrackingUniverseOrigin, device: TrackedDeviceIndex) -> Option<(ControllerState, TrackedDevicePose)> {
unsafe {
let mut state = mem::uninitialized();
@@ -284,7 +285,7 @@ impl<'a> System<'a> {
/// This halts the timeout and dismisses the dashboard (if it was up). Applications should be sure to actually
/// prompt the user to save and then exit afterward, otherwise the user will be left in a confusing state.
pub fn acknowledge_quit_user_prompt(&self) {
unsafe { self.0.AcknowledgeQuit_Exiting.unwrap()(); }
unsafe { self.0.AcknowledgeQuit_UserPrompt.unwrap()(); }
}
}


@@ -1,224 +0,0 @@
use openvr_sys;
use tracking::*;
use error::*;
use subsystems::*;
pub struct IVRTrackedCamera(pub *const ());
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameSize {
pub width: u32,
pub height: u32,
pub buffer: u32
}
#[derive(Debug, Copy, Clone)]
pub enum CameraFrameType {
Distorted,
Undistorted,
MaximumUndistorted,
MaximumFrameTypes
}
#[derive(Debug, Copy, Clone)]
pub struct CameraIntriniscs {
pub focal_length: [f32; 2],
pub center: [f32; 2]
}
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameHeader {
pub width: u32,
pub height: u32,
pub bytes_per_pixel: u32,
pub frame_sequence: u32,
pub pose: TrackedDevicePose
}
#[derive(Debug)]
pub struct CameraFrame {
pub framebuffer: Vec<u8>,
pub header: CameraFrameHeader
}
pub struct CameraStream {
pub handle: u64,
pub owner: TrackedDevicePose
}
impl Drop for CameraStream {
fn drop(&mut self) {
unsafe {
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.ReleaseVideoStreamingService.unwrap()(self.handle));
if error.is_err() {
println!("Failed to drop camera stream! Possible memory leak! {}", error.message());
}
}
}
}
impl CameraStream {
/// reads current camera frame
pub fn read(&self, ctype: CameraFrameType) -> Result<CameraFrame, Error<openvr_sys::EVRTrackedCameraError>> {
use std::mem;
use std;
unsafe {
// get subsystems
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let size = tracked_camera().unwrap().frame_size(&self.owner, ctype).unwrap();
// create raw buffer where openvr can store it's data into
let mut buffer = Vec::<u8>::with_capacity(size.buffer as usize);
let raw_buffer = buffer.as_mut_ptr();
mem::forget(buffer);
// create header
let mut header = openvr_sys::CameraVideoStreamFrameHeader_t::default();
let error = Error::from_raw(
cam.GetVideoStreamFrameBuffer.unwrap()(
self.handle,
ctype.to_raw(),
raw_buffer as *mut std::os::raw::c_void,
size.buffer,
&mut header,
mem::size_of::<openvr_sys::CameraVideoStreamFrameHeader_t>() as u32
));
if error.is_ok() {
// bring framebuffer back into rusts controll
let buffer = Vec::from_raw_parts(raw_buffer, size.buffer as usize, size.buffer as usize);
return Ok(CameraFrame {
framebuffer: buffer,
header: CameraFrameHeader {
width: header.nWidth,
height: header.nHeight,
bytes_per_pixel: header.nBytesPerPixel,
frame_sequence: header.nFrameSequence,
pose: TrackedDevicePose::from_raw(self.owner.index, header.standingTrackedDevicePose)
}
});
} else {
return Err(error);
}
}
}
}
impl CameraFrameType {
pub fn to_raw(&self) -> openvr_sys::EVRTrackedCameraFrameType {
use openvr_sys::EVRTrackedCameraFrameType::*;
match self {
&CameraFrameType::Distorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Distorted,
&CameraFrameType::Undistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Undistorted,
&CameraFrameType::MaximumUndistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_MaximumUndistorted ,
&CameraFrameType::MaximumFrameTypes => EVRTrackedCameraFrameType_MAX_CAMERA_FRAME_TYPES
}
}
}
impl IVRTrackedCamera {
pub unsafe fn from_raw(ptr: *const ()) -> Self {
IVRTrackedCamera(ptr as *mut ())
}
/// checks whether the current system has a camera
pub fn has_camera(&self, device: &TrackedDevicePose) -> Result<bool, Error<openvr_sys::EVRTrackedCameraError>> {
unsafe {
let cam = * { self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let mut has_cam = 0i32;
let error = Error::from_raw(
cam.HasCamera.unwrap()(device.index as u32, &mut has_cam as *mut i32));
if error.is_ok() {
return Ok(has_cam > 0i32);
} else {
return Err(error);
}
}
}
/// gets frame buffer information of camera
pub fn frame_size(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraFrameSize, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut result = CameraFrameSize {
width: 0,
height: 0,
buffer: 0,
};
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraFrameSize.unwrap()(device.index as u32,
ctype.to_raw(),
&mut result.width,
&mut result.height,
&mut result.buffer));
if error.is_ok() {
return Ok(result);
} else {
return Err(error);
}
}
}
// gets camera intrinsic
pub fn intrinisics(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraIntriniscs, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut focal = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let mut center = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraIntrinisics.unwrap()(device.index as u32,
ctype.to_raw(),
&mut focal,
&mut center));
if error.is_ok() {
return Ok(CameraIntriniscs {
focal_length: focal.v,
center: center.v
});
} else {
return Err(error);
}
}
}
/// aquires a stream to the given camera device
pub fn stream(&self, device: &TrackedDevicePose) -> Result<CameraStream, Error<openvr_sys::EVRTrackedCameraError>> {
unsafe {
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let mut handle = 0u64;
let error = Error::from_raw(
cam.AcquireVideoStreamingService.unwrap()(device.index as u32, &mut handle));
if error.is_ok() {
return Ok(CameraStream {
handle: handle,
owner: *device
});
} else {
return Err(error);
}
}
}
}