Update to 1.0.10, fix travis (#31)

* Update to OpenVR 1.0.10

* Typo fix

* Missing doc comment

* Delete dead examples, cleanup

* Delete dead code

* Remove lifetimes from subsystems

OpenVR must be shut down manually, which invalidates outstanding
subsystem handles regardless of lifetimes, rendering the ergonomic
sacrifice pointless.

Future work: make shutdown safe by inserting checks before every
OpenVR call (see the sketch after this list).

* Depend on our own openvr-sys

* Update metadata

* Update readme

* More detailed safety notes

* Depend on released openvr-sys
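
A minimal sketch of the liveness check mentioned under "Remove lifetimes from subsystems" above. The flag, error type, and guard function are hypothetical illustrations, not part of this commit:

use std::sync::atomic::{AtomicBool, Ordering};

// Hypothetical global mirroring the crate's INITIALIZED flag; shutdown would clear it.
static LIVE: AtomicBool = AtomicBool::new(true);

/// Hypothetical error returned instead of calling into OpenVR after shutdown.
#[derive(Debug)]
pub struct ContextDead;

/// Each subsystem method would route its FFI call through a guard like this.
fn with_live<T, F: FnOnce() -> T>(f: F) -> Result<T, ContextDead> {
    if LIVE.load(Ordering::Acquire) {
        Ok(f())
    } else {
        Err(ContextDead)
    }
}
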
Benjamin Saunders authored on 2017-10-08 14:56:32 -07:00; committed by GitHub
parent eb1f18a4ea, commit 2098bcc257
14 changed files with 87 additions and 823 deletions

View File

@@ -1,80 +0,0 @@
use openvr_sys;
use openvr_sys::EVREye::*;
#[derive(Debug, Copy, Clone)]
pub struct Size {
pub width: u32,
pub height: u32
}
#[derive(Debug, Copy, Clone)]
pub struct Position {
pub x: i32,
pub y: i32
}
#[derive(Debug, Copy, Clone)]
pub struct Rectangle {
pub position: Position,
pub size: Size
}
#[derive(Debug, Copy, Clone)]
pub struct DistortionCoordinates {
pub red: [f32; 2],
pub green: [f32; 2],
pub blue: [f32; 2],
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Eye {
Left, Right
}
impl Eye {
/// Convert a eye to a HmdEye
pub fn to_raw(&self) -> openvr_sys::EVREye {
match self {
&Eye::Left => EVREye_Eye_Left,
&Eye::Right => EVREye_Eye_Right,
}
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextureBounds {
pub u_min: f32,
pub u_max: f32,
pub v_min: f32,
pub v_max: f32
}
impl TextureBounds {
pub fn new(u: (f32, f32), v: (f32, f32)) -> Self {
TextureBounds {
u_min: u.0,
u_max: u.1,
v_min: v.0,
v_max: v.1
}
}
pub fn full() -> Self {
TextureBounds {
u_min: 0.0,
u_max: 1.0,
v_min: 0.0,
v_max: 1.0
}
}
/// Convert a bounds to a openvr_bounds
pub fn to_raw(self) -> openvr_sys::VRTextureBounds_t {
openvr_sys::VRTextureBounds_t {
uMin: self.u_min,
uMax: self.u_max,
vMin: self.v_min,
vMax: self.v_max
}
}
}

View File

@@ -19,7 +19,7 @@ pub use self::texture::Texture;
use super::*;
impl<'a> Compositor<'a> {
impl Compositor {
pub fn vulkan_instance_extensions_required(&self) -> Vec<CString> {
let temp = unsafe { get_string(|ptr, n| self.0.GetVulkanInstanceExtensionsRequired.unwrap()(ptr, n)) }.unwrap();
temp.as_bytes().split(|&x| x == b' ').map(|x| CString::new(x.to_vec()).expect("extension name contained null byte")).collect()
@@ -60,14 +60,14 @@ impl<'a> Compositor<'a> {
/// # Safety
///
/// The handles you supply must be valid and comply with the graphics API's synchronization requirements.
pub unsafe fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&texture::Bounds>) -> Result<(), CompositorError> {
pub unsafe fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&texture::Bounds>, pose: Option<[[f32; 4]; 3]>) -> Result<(), CompositorError> {
use self::texture::Handle::*;
let flags = match texture.handle {
Vulkan(_) => sys::EVRSubmitFlags_Submit_Default,
OpenGLTexture(_) => sys::EVRSubmitFlags_Submit_Default,
OpenGLRenderBuffer(_) => sys::EVRSubmitFlags_Submit_GlRenderBuffer,
};
let texture = sys::Texture_t {
} | if pose.is_some() { sys::EVRSubmitFlags_Submit_TextureWithPose } else { 0 };
let texture = sys::VRTextureWithPose_t_real {
handle: match texture.handle {
Vulkan(ref x) => x as *const _ as *mut _,
OpenGLTexture(x) => x as *mut _,
@@ -79,6 +79,7 @@ impl<'a> Compositor<'a> {
OpenGLRenderBuffer(_) => sys::ETextureType_TextureType_OpenGL,
},
eColorSpace: texture.color_space as sys::EColorSpace,
mDeviceToAbsoluteTracking: sys::HmdMatrix34_t { m: pose.unwrap_or([[0.0; 4]; 3]) },
};
let e = self.0.Submit.unwrap()(
eye as sys::EVREye,
@@ -115,6 +116,39 @@ impl<'a> Compositor<'a> {
pub fn clear_last_submitted_frame(&self) {
unsafe { self.0.ClearLastSubmittedFrame.unwrap()() }
}
/// Controls whether the application should flag the time at which the frame begins explicitly
///
/// *Vulkan/D3D12 Only*
/// There are two purposes for SetExplicitTimingMode:
/// 1. To get a more accurate GPU timestamp for when the frame begins in Vulkan/D3D12 applications.
/// 2. (Optional) To avoid having WaitGetPoses access the Vulkan queue so that the queue can be accessed from
/// another thread while WaitGetPoses is executing.
///
/// More accurate GPU timestamp for the start of the frame is achieved by the application calling
/// SubmitExplicitTimingData immediately before its first submission to the Vulkan/D3D12 queue. This is more
/// accurate because normally this GPU timestamp is recorded during WaitGetPoses. In D3D11, WaitGetPoses queues a
/// GPU timestamp write, but it does not actually get submitted to the GPU until the application flushes. By using
/// SubmitExplicitTimingData, the timestamp is recorded at the same place for Vulkan/D3D12 as it is for D3D11,
/// resulting in a more accurate GPU time measurement for the frame.
///
/// Avoiding WaitGetPoses accessing the Vulkan queue can be achieved using SetExplicitTimingMode as well. If this
/// is desired, the application *MUST* call PostPresentHandoff itself prior to WaitGetPoses. If
/// SetExplicitTimingMode is true and the application calls PostPresentHandoff, then WaitGetPoses is guaranteed not
/// to access the queue. Note that PostPresentHandoff and SubmitExplicitTimingData will access the queue, so only
/// WaitGetPoses becomes safe for accessing the queue from another thread.
pub fn set_explicit_timing_mode(&self, mode: bool) {
unsafe { self.0.SetExplicitTimingMode.unwrap()(mode) }
}
pub fn submit_explicit_timing_data(&self) -> Result<(), CompositorError> {
let e = unsafe { self.0.SubmitExplicitTimingData.unwrap()() };
if e == sys::EVRCompositorError_VRCompositorError_None {
Ok(())
} else {
Err(CompositorError(e))
}
}
}
#[derive(Debug, Copy, Clone)]
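
A hedged usage sketch of the new compositor entry points above. The import paths for `Compositor`, `Texture`, `CompositorError`, and `Eye` are assumed from the surrounding crate layout, and the caller supplies the texture and pose:

use openvr::compositor::{CompositorError, Texture}; // module paths assumed
use openvr::{Compositor, Eye};

fn submit_left_eye(
    compositor: &Compositor,
    left_texture: &Texture,
    render_pose: Option<[[f32; 4]; 3]>,
) -> Result<(), CompositorError> {
    // With explicit timing enabled (compositor.set_explicit_timing_mode(true) once at
    // startup), call this immediately before the frame's first Vulkan/D3D12 submission.
    compositor.submit_explicit_timing_data()?;

    // The new fourth argument is the optional render pose; `None` keeps the pre-1.0.10
    // behavior, `Some(matrix)` makes the crate set Submit_TextureWithPose.
    unsafe { compositor.submit(Eye::Left, left_texture, None, render_pose) }
}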

View File

@@ -1,57 +0,0 @@
use openvr_sys;
use common::*;
pub struct IVRExtendedDisplay(*const ());
impl IVRExtendedDisplay {
pub unsafe fn from_raw(ptr: *const ()) -> Self {
IVRExtendedDisplay(ptr as *mut ())
}
/// Get the window bounds
pub fn window_bounds(&self) -> Rectangle {
unsafe {
let ext = * { self.0 as *mut openvr_sys::VR_IVRExtendedDisplay_FnTable };
let mut size = Size{width: 0, height: 0};
let mut pos = Position{x: 0, y: 0};
ext.GetWindowBounds.unwrap()(
&mut pos.x,
&mut pos.y,
&mut size.width,
&mut size.height
);
Rectangle {
position: pos,
size: size
}
}
}
/// Get eye viewport size
pub fn eye_viewport(&self, eye: Eye) -> Rectangle {
use std::mem;
unsafe {
let ext = * { self.0 as *mut openvr_sys::VR_IVRExtendedDisplay_FnTable };
let mut size = Size{width: 0, height: 0};
let mut pos = Position{x: 0, y: 0};
ext.GetEyeOutputViewport.unwrap()(
eye.to_raw(),
mem::transmute(&mut pos.x),
mem::transmute(&mut pos.y),
&mut size.width,
&mut size.height
);
Rectangle {
position: pos,
size: size
}
}
}
}

View File

@@ -33,7 +33,8 @@ static INITIALIZED: AtomicBool = ATOMIC_BOOL_INIT;
///
/// # Safety
///
/// The `Context` MUST be dropped or shut down with `Context::shutdown` before shutting down the graphics API.
/// The `Context` MUST be dropped or shut down with `Context::shutdown` before shutting down the graphics API. No OpenVR
calls may be made on objects derived from a `Context` after the `Context` has been dropped or explicitly shut down.
pub unsafe fn init(ty: ApplicationType) -> Result<Context, InitError> {
if INITIALIZED.swap(true, Ordering::Acquire) {
panic!("OpenVR has already been initialized!");
@@ -51,13 +52,15 @@ pub unsafe fn init(ty: ApplicationType) -> Result<Context, InitError> {
Ok(Context { live: Cell::new(true) })
}
pub struct System<'a>(&'a sys::VR_IVRSystem_FnTable);
pub struct Compositor<'a>(&'a sys::VR_IVRCompositor_FnTable);
pub struct RenderModels<'a>(&'a sys::VR_IVRRenderModels_FnTable);
pub struct System(&'static sys::VR_IVRSystem_FnTable);
pub struct Compositor(&'static sys::VR_IVRCompositor_FnTable);
pub struct RenderModels(&'static sys::VR_IVRRenderModels_FnTable);
/// Entry points into OpenVR.
///
/// At most one instance of this object may exist at a time.
///
/// See safety notes in `init`.
pub struct Context { live: Cell<bool> }
fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {
@@ -86,12 +89,15 @@ impl Drop for Context {
impl Context {
/// Shut down OpenVR. Repeated calls are safe.
///
/// Called implicitly by `Context::drop`. This MUST be called before shutting down the graphics API, or OpenVR may
/// invoke undefined behavior.
/// Called implicitly by `Context::drop`.
///
/// # Safety
///
/// No OpenVR calls may be made after this has been called unless a new `Context` is subsequently constructed.
/// This *must* be called *before* shutting down the graphics API, or OpenVR may invoke undefined behavior by
/// attempting to free graphics resources.
///
/// No calls to other OpenVR methods may be made after this has been called unless a new `Context` is first
/// constructed.
pub unsafe fn shutdown(&self) {
if self.live.replace(false) {
sys::VR_ShutdownInternal();
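
A sketch of the lifecycle these notes require. The `system()`/`compositor()` accessors on `Context` and the graphics-teardown stub are assumptions about code outside this diff:

fn run() -> Result<(), openvr::InitError> {
    // Safety: we shut the context down before tearing down the graphics API below.
    let context = unsafe { openvr::init(openvr::ApplicationType::Scene)? };
    let _system = context.system()?;         // subsystem handles no longer borrow the Context
    let _compositor = context.compositor()?;

    // ... render loop ...

    // Safety: nothing derived from `context` is used past this point.
    unsafe { context.shutdown() };
    destroy_graphics_device(); // hypothetical stand-in for the application's Vulkan/GL teardown
    Ok(())
}

fn destroy_graphics_device() { /* application-specific */ }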

View File

@@ -5,7 +5,7 @@ use openvr_sys as sys;
use {RenderModels, ControllerState, get_string};
impl<'a> RenderModels<'a> {
impl RenderModels {
/// Loads and returns a render model for use in the application. `name` should be a render model name from the
/// `RenderModelName_String` property or an absolute path name to a render model on disk.
///
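
For context, a hedged example of calling this from an application. The `load_render_model` accessor and its `Result<Option<_>, _>` shape (returning `Ok(None)` while the model is still loading) are assumptions about the crate's API rather than something shown in this hunk:

use std::ffi::CString;

fn try_load(models: &openvr::RenderModels, name: &str) {
    // `name` would normally come from the device's RenderModelName_String property.
    let name = CString::new(name).unwrap();
    match models.load_render_model(&name) {
        Ok(Some(_model)) => { /* upload the model's vertex/index data to the GPU */ }
        Ok(None) => { /* still loading asynchronously; poll again next frame */ }
        Err(e) => eprintln!("failed to load render model: {:?}", e),
    }
}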

View File

@@ -34,8 +34,7 @@ pub struct Controller {
impl FromEventData for Controller {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.controller.as_ref();
Controller { button: x.button }
Controller { button: x.controller.button }
}
}
@@ -50,8 +49,7 @@ pub struct Mouse {
impl FromEventData for Mouse {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.mouse.as_ref();
Mouse { position: (x.x, x.y), button: x.button }
Mouse { position: (x.mouse.x, x.mouse.y), button: x.mouse.button }
}
}
@@ -67,8 +65,7 @@ pub struct Scroll {
impl FromEventData for Scroll {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.scroll.as_ref();
Scroll { delta: (x.xdelta, x.ydelta), repeat_count: x.repeatCount }
Scroll { delta: (x.scroll.xdelta, x.scroll.ydelta), repeat_count: x.scroll.repeatCount }
}
}
@@ -88,10 +85,9 @@ pub struct TouchPadMove {
impl FromEventData for TouchPadMove {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.touchPadMove.as_ref();
TouchPadMove { finger_down: x.bFingerDown, seconds_finger_down: x.flSecondsFingerDown,
first: (x.fValueXFirst, x.fValueYFirst),
raw: (x.fValueXRaw, x.fValueYRaw) }
TouchPadMove { finger_down: x.touchPadMove.bFingerDown, seconds_finger_down: x.touchPadMove.flSecondsFingerDown,
first: (x.touchPadMove.fValueXFirst, x.touchPadMove.fValueYFirst),
raw: (x.touchPadMove.fValueXRaw, x.touchPadMove.fValueYRaw) }
}
}
@@ -111,8 +107,7 @@ pub struct Process {
impl FromEventData for Process {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.process.as_ref();
Process { pid: x.pid, old_pid: x.oldPid, forced: x.bForced }
Process { pid: x.process.pid, old_pid: x.process.oldPid, forced: x.process.bForced }
}
}
@@ -123,8 +118,7 @@ pub struct Overlay {
impl FromEventData for Overlay {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.overlay.as_ref();
Overlay { overlay_handle: x.overlayHandle }
Overlay { overlay_handle: x.overlay.overlayHandle }
}
}
@@ -141,7 +135,7 @@ pub struct Keyboard {
impl FromEventData for Keyboard {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = &*(x.keyboard.as_ref() as *const _ as *const sys::VREvent_Keyboard_t_real);
let x = &*(&x.keyboard as *const _ as *const sys::VREvent_Keyboard_t_real);
Keyboard { new_input: *(x.cNewInput.as_ptr() as *const _), user_value: x.uUserValue }
}
}

View File

@@ -13,7 +13,7 @@ use super::*;
pub use self::event::{Event, EventInfo};
impl<'a> System<'a> {
impl System {
/// Provides the game with the minimum size that it should use for its offscreen render target to minimize pixel
/// stretching. This size is matched with the projection matrix and distortion function and will change from display
/// to display depending on resolution, distortion, and field of view.
@@ -155,10 +155,10 @@ impl<'a> System<'a> {
}
}
pub fn vulkan_output_device(&self) -> Option<*mut VkPhysicalDevice_T> {
pub fn vulkan_output_device(&self, instance: *mut VkInstance_T) -> Option<*mut VkPhysicalDevice_T> {
unsafe {
let mut device = mem::uninitialized();
self.0.GetOutputDevice.unwrap()(&mut device, sys::ETextureType_TextureType_Vulkan);
self.0.GetOutputDevice.unwrap()(&mut device, sys::ETextureType_TextureType_Vulkan, instance);
if device == 0 { None } else { Some(device as usize as *mut _) }
}
}
@@ -247,6 +247,7 @@ impl<'a> System<'a> {
}
}
/// See `controller_state`
pub fn controller_state_with_pose(&self, origin: TrackingUniverseOrigin, device: TrackedDeviceIndex) -> Option<(ControllerState, TrackedDevicePose)> {
unsafe {
let mut state = mem::uninitialized();
@@ -284,7 +285,7 @@ impl<'a> System<'a> {
/// This halts the timeout and dismisses the dashboard (if it was up). Applications should be sure to actually
/// prompt the user to save and then exit afterward, otherwise the user will be left in a confusing state.
pub fn acknowledge_quit_user_prompt(&self) {
unsafe { self.0.AcknowledgeQuit_Exiting.unwrap()(); }
unsafe { self.0.AcknowledgeQuit_UserPrompt.unwrap()(); }
}
}
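
A short sketch combining the two System calls touched above. The `recommended_render_target_size` name is an assumption (the doc comment above describes the underlying GetRecommendedRenderTargetSize call), and the raw `VkInstance_T` pointer comes from the application's own Vulkan setup:

// The openvr_sys::VkInstance_T path is assumed from the generated bindings.
fn pick_device_and_size(system: &openvr::System, instance: *mut openvr_sys::VkInstance_T) {
    // Assumed accessor for GetRecommendedRenderTargetSize; returns (width, height).
    let (width, height) = system.recommended_render_target_size();
    println!("offscreen render target: {}x{}", width, height);

    // New in 1.0.10: the Vulkan instance must be passed so OpenVR can report which
    // physical device it wants the application to render on.
    match system.vulkan_output_device(instance) {
        Some(_physical_device) => { /* create the Vulkan device on this physical device */ }
        None => { /* no preference reported; fall back to the application's own selection */ }
    }
}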

View File

@@ -1,224 +0,0 @@
use openvr_sys;
use tracking::*;
use error::*;
use subsystems::*;
pub struct IVRTrackedCamera(pub *const ());
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameSize {
pub width: u32,
pub height: u32,
pub buffer: u32
}
#[derive(Debug, Copy, Clone)]
pub enum CameraFrameType {
Distorted,
Undistorted,
MaximumUndistorted,
MaximumFrameTypes
}
#[derive(Debug, Copy, Clone)]
pub struct CameraIntriniscs {
pub focal_length: [f32; 2],
pub center: [f32; 2]
}
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameHeader {
pub width: u32,
pub height: u32,
pub bytes_per_pixel: u32,
pub frame_sequence: u32,
pub pose: TrackedDevicePose
}
#[derive(Debug)]
pub struct CameraFrame {
pub framebuffer: Vec<u8>,
pub header: CameraFrameHeader
}
pub struct CameraStream {
pub handle: u64,
pub owner: TrackedDevicePose
}
impl Drop for CameraStream {
fn drop(&mut self) {
unsafe {
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.ReleaseVideoStreamingService.unwrap()(self.handle));
if error.is_err() {
println!("Failed to drop camera stream! Possible memory leak! {}", error.message());
}
}
}
}
impl CameraStream {
/// reads current camera frame
pub fn read(&self, ctype: CameraFrameType) -> Result<CameraFrame, Error<openvr_sys::EVRTrackedCameraError>> {
use std::mem;
use std;
unsafe {
// get subsystems
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let size = tracked_camera().unwrap().frame_size(&self.owner, ctype).unwrap();
// create raw buffer where openvr can store it's data into
let mut buffer = Vec::<u8>::with_capacity(size.buffer as usize);
let raw_buffer = buffer.as_mut_ptr();
mem::forget(buffer);
// create header
let mut header = openvr_sys::CameraVideoStreamFrameHeader_t::default();
let error = Error::from_raw(
cam.GetVideoStreamFrameBuffer.unwrap()(
self.handle,
ctype.to_raw(),
raw_buffer as *mut std::os::raw::c_void,
size.buffer,
&mut header,
mem::size_of::<openvr_sys::CameraVideoStreamFrameHeader_t>() as u32
));
if error.is_ok() {
// bring framebuffer back into rusts controll
let buffer = Vec::from_raw_parts(raw_buffer, size.buffer as usize, size.buffer as usize);
return Ok(CameraFrame {
framebuffer: buffer,
header: CameraFrameHeader {
width: header.nWidth,
height: header.nHeight,
bytes_per_pixel: header.nBytesPerPixel,
frame_sequence: header.nFrameSequence,
pose: TrackedDevicePose::from_raw(self.owner.index, header.standingTrackedDevicePose)
}
});
} else {
return Err(error);
}
}
}
}
impl CameraFrameType {
pub fn to_raw(&self) -> openvr_sys::EVRTrackedCameraFrameType {
use openvr_sys::EVRTrackedCameraFrameType::*;
match self {
&CameraFrameType::Distorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Distorted,
&CameraFrameType::Undistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Undistorted,
&CameraFrameType::MaximumUndistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_MaximumUndistorted ,
&CameraFrameType::MaximumFrameTypes => EVRTrackedCameraFrameType_MAX_CAMERA_FRAME_TYPES
}
}
}
impl IVRTrackedCamera {
pub unsafe fn from_raw(ptr: *const ()) -> Self {
IVRTrackedCamera(ptr as *mut ())
}
/// checks whether the current system has a camera
pub fn has_camera(&self, device: &TrackedDevicePose) -> Result<bool, Error<openvr_sys::EVRTrackedCameraError>> {
unsafe {
let cam = * { self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let mut has_cam = 0i32;
let error = Error::from_raw(
cam.HasCamera.unwrap()(device.index as u32, &mut has_cam as *mut i32));
if error.is_ok() {
return Ok(has_cam > 0i32);
} else {
return Err(error);
}
}
}
/// gets frame buffer information of camera
pub fn frame_size(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraFrameSize, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut result = CameraFrameSize {
width: 0,
height: 0,
buffer: 0,
};
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraFrameSize.unwrap()(device.index as u32,
ctype.to_raw(),
&mut result.width,
&mut result.height,
&mut result.buffer));
if error.is_ok() {
return Ok(result);
} else {
return Err(error);
}
}
}
// gets camera intrinsic
pub fn intrinisics(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraIntriniscs, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut focal = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let mut center = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraIntrinisics.unwrap()(device.index as u32,
ctype.to_raw(),
&mut focal,
&mut center));
if error.is_ok() {
return Ok(CameraIntriniscs {
focal_length: focal.v,
center: center.v
});
} else {
return Err(error);
}
}
}
/// aquires a stream to the given camera device
pub fn stream(&self, device: &TrackedDevicePose) -> Result<CameraStream, Error<openvr_sys::EVRTrackedCameraError>> {
unsafe {
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let mut handle = 0u64;
let error = Error::from_raw(
cam.AcquireVideoStreamingService.unwrap()(device.index as u32, &mut handle));
if error.is_ok() {
return Ok(CameraStream {
handle: handle,
owner: *device
});
} else {
return Err(error);
}
}
}
}