Rearrange modules, tweak Context semantics, support OpenGL
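For orientation, a rough usage sketch of the reworked API this commit introduces (not code from the repository): it assumes the crate is imported as `openvr` and that `ApplicationType::Scene` exists alongside the `Bootstrapper` variant visible in the diff. The signatures themselves follow the diff below: `init` returns `Result<Context, InitError>`, and the per-interface getters now load their function tables on demand and return `Result` instead of being infallible.

```rust
// Hedged sketch of the new Context semantics; names outside the diff are assumptions.
fn run() -> Result<(), openvr::InitError> {
    let context = openvr::init(openvr::ApplicationType::Scene)?;
    let system = context.system()?;          // loads IVRSystem lazily
    let _compositor = context.compositor()?; // loads IVRCompositor lazily
    let (w, h) = system.recommended_render_target_size();
    println!("recommended render target: {}x{}", w, h);
    Ok(()) // `context` is dropped here; `impl Drop for Context` handles shutdown
}
```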
@@ -13,6 +13,10 @@ use std::ffi::CString;

use openvr_sys as sys;

pub mod texture;

pub use self::texture::Texture;

use super::*;

impl<'a> Compositor<'a> {
@@ -63,22 +67,31 @@ impl<'a> Compositor<'a> {
/// Display the supplied texture for the next frame.
///
/// If `bounds` is None, the entire texture will be used. Lens distortion is handled by the OpenVR implementation.
pub fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&TextureBounds>) -> Result<(), CompositorError> {
use self::TextureHandle::*;
pub fn submit(&self, eye: Eye, texture: &Texture, bounds: Option<&texture::Bounds>) -> Result<(), CompositorError> {
use self::texture::Handle::*;
let flags = match texture.handle {
Vulkan(_) => sys::EVRSubmitFlags_EVRSubmitFlags_Submit_Default,
OpenGLTexture(_) => sys::EVRSubmitFlags_EVRSubmitFlags_Submit_Default,
OpenGLRenderBuffer(_) => sys::EVRSubmitFlags_EVRSubmitFlags_Submit_GlRenderBuffer,
};
let texture = sys::Texture_t {
handle: match texture.handle {
Vulkan(ref x) => x as *const _ as *mut _,
OpenGLTexture(x) => x as *mut _,
OpenGLRenderBuffer(x) => x as *mut _,
},
eType: match texture.handle {
Vulkan(_) => sys::ETextureType_ETextureType_TextureType_Vulkan,
OpenGLTexture(_) => sys::ETextureType_ETextureType_TextureType_OpenGL,
OpenGLRenderBuffer(_) => sys::ETextureType_ETextureType_TextureType_OpenGL,
},
eColorSpace: texture.color_space as sys::EColorSpace,
};
let e = unsafe {
(self.0.Submit.unwrap())(eye as sys::EVREye,
&texture as *const _ as *mut _,
bounds.map(|x| x as *const _ as *mut TextureBounds as *mut _).unwrap_or(ptr::null_mut()),
sys::EVRSubmitFlags_EVRSubmitFlags_Submit_Default)
self.0.Submit.unwrap()(eye as sys::EVREye,
&texture as *const _ as *mut _,
bounds.map(|x| x as *const _ as *mut texture::Bounds as *mut _).unwrap_or(ptr::null_mut()),
flags)
};
if e == sys::EVRCompositorError_EVRCompositorError_VRCompositorError_None {
Ok(())
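To illustrate the new OpenGL path added above, a hedged sketch of per-eye submission. `Texture`, `Handle::OpenGLTexture`, `ColorSpace`, and `Compositor::submit` come from this diff; the module paths, the `Eye::Left`/`Eye::Right` variants, and the texture ids are assumptions.

```rust
// Sketch: submit one GL texture per eye. `left_tex` / `right_tex` are
// hypothetical GL texture names produced by the application's renderer.
use openvr::compositor::texture::{ColorSpace, Handle, Texture};

fn present(compositor: &openvr::Compositor<'_>, left_tex: usize, right_tex: usize)
    -> Result<(), openvr::compositor::CompositorError>
{
    let left = Texture { handle: Handle::OpenGLTexture(left_tex), color_space: ColorSpace::Auto };
    let right = Texture { handle: Handle::OpenGLTexture(right_tex), color_space: ColorSpace::Auto };
    // None = use the whole texture; lens distortion is applied by the OpenVR runtime.
    compositor.submit(openvr::Eye::Left, &left, None)?;
    compositor.submit(openvr::Eye::Right, &right, None)?;
    Ok(())
}
```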
@@ -100,55 +113,6 @@ pub struct WaitPoses {
pub game: TrackedDevicePoses,
}

pub use sys::VkPhysicalDevice_T;
pub use sys::VkDevice_T;
pub use sys::VkInstance_T;
pub use sys::VkQueue_T;

#[derive(Debug, Copy, Clone)]
pub struct Texture {
pub handle: TextureHandle,
pub color_space: ColorSpace,
}

pub mod vulkan {
use super::*;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Texture {
pub image: u64,
pub device: *mut VkDevice_T,
pub physical_device: *mut VkPhysicalDevice_T,
pub instance: *mut VkInstance_T,
pub queue: *mut VkQueue_T,
pub queue_family_index: u32,
pub width: u32,
pub height: u32,
pub format: u32,
pub sample_count: u32,
}
}

#[derive(Debug, Copy, Clone)]
pub enum TextureHandle {
Vulkan(vulkan::Texture),
}

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ColorSpace {
Auto = sys::EColorSpace_EColorSpace_ColorSpace_Auto as isize,
Gamma = sys::EColorSpace_EColorSpace_ColorSpace_Gamma as isize,
Linear = sys::EColorSpace_EColorSpace_ColorSpace_Linear as isize,
}

#[repr(C)]
pub struct TextureBounds {
pub umin: f32,
pub vmin: f32,
pub umax: f32,
pub vmax: f32,
}

#[derive(Copy, Clone, Eq, PartialEq)]
pub struct CompositorError(sys::EVRCompositorError);

@@ -197,3 +161,8 @@ impl fmt::Display for CompositorError {
f.pad(error::Error::description(self))
}
}

pub use sys::VkPhysicalDevice_T;
pub use sys::VkDevice_T;
pub use sys::VkInstance_T;
pub use sys::VkQueue_T;
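Since the `Display` impl for `CompositorError` shown above pads the error description, submit failures can be logged directly. A hedged sketch (module paths and the `Eye` argument are assumptions, as before):

```rust
// Sketch: log a rejected frame using CompositorError's Display impl.
fn submit_or_log(compositor: &openvr::Compositor<'_>, eye: openvr::Eye, tex: &openvr::compositor::Texture) {
    if let Err(e) = compositor.submit(eye, tex, None) {
        eprintln!("compositor rejected frame: {}", e);
    }
}
```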
src/compositor/texture.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use super::{sys, VkInstance_T, VkDevice_T, VkPhysicalDevice_T, VkQueue_T};

#[derive(Debug, Copy, Clone)]
pub struct Texture {
pub handle: Handle,
pub color_space: ColorSpace,
}

#[repr(C)]
pub struct Bounds {
pub min: (f32, f32),
pub max: (f32, f32),
}

pub mod vulkan {
use super::*;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Texture {
pub image: u64,
pub device: *mut VkDevice_T,
pub physical_device: *mut VkPhysicalDevice_T,
pub instance: *mut VkInstance_T,
pub queue: *mut VkQueue_T,
pub queue_family_index: u32,
pub width: u32,
pub height: u32,
pub format: u32,
pub sample_count: u32,
}
}

#[derive(Debug, Copy, Clone)]
pub enum Handle {
Vulkan(vulkan::Texture),
OpenGLTexture(usize),
OpenGLRenderBuffer(usize),
}

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ColorSpace {
Auto = sys::EColorSpace_EColorSpace_ColorSpace_Auto as isize,
Gamma = sys::EColorSpace_EColorSpace_ColorSpace_Gamma as isize,
Linear = sys::EColorSpace_EColorSpace_ColorSpace_Linear as isize,
}
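A small, hedged illustration of the new `Bounds` type above. Assuming its `min`/`max` pairs carry the same UV meaning as the old `TextureBounds` fields (`umin`/`vmin` to `umax`/`vmax`), one side-by-side render target can be split between the eyes like this:

```rust
use openvr::compositor::texture::Bounds;

// Assumed UV convention carried over from the old TextureBounds fields:
// the left eye samples the left half, the right eye the right half.
fn side_by_side_bounds() -> (Bounds, Bounds) {
    let left = Bounds { min: (0.0, 0.0), max: (0.5, 1.0) };
    let right = Bounds { min: (0.5, 0.0), max: (1.0, 1.0) };
    (left, right)
}
// Pass `Some(&left)` / `Some(&right)` to `Compositor::submit` instead of `None`.
```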
src/lib.rs (42 lines changed)
@@ -33,42 +33,33 @@ pub fn init(ty: ApplicationType) -> Result<Context, InitError> {
unsafe { sys::VR_ShutdownInternal() }
return Err(InitError(sys::EVRInitError_EVRInitError_VRInitError_Init_InterfaceNotFound));
}
Ok(unsafe { Context::new() }?)
Ok(Context {})
}

pub struct System<'a>(&'a sys::VR_IVRSystem_FnTable);
pub struct Compositor<'a>(&'a sys::VR_IVRCompositor_FnTable);
pub struct RenderModels<'a>(&'a sys::VR_IVRRenderModels_FnTable);

/// Entry points into OpenVR.
///
/// At most one of this object may exist at a time.
pub struct Context {
system: *const sys::VR_IVRSystem_FnTable,
compositor: *const sys::VR_IVRCompositor_FnTable,
pub struct Context {}

fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {
let mut magic = Vec::from(b"FnTable:".as_ref());
magic.extend(suffix);
let mut error = sys::EVRInitError_EVRInitError_VRInitError_None;
let result = unsafe { sys::VR_GetGenericInterface(magic.as_ptr() as *const i8, &mut error) };
if error != sys::EVRInitError_EVRInitError_VRInitError_None {
return Err(InitError(sys::EVRInitError_EVRInitError_VRInitError_Init_InterfaceNotFound));
}
Ok(result as *const T)
}

impl Context {
/// Must be called after sys::VR_InitInternal
unsafe fn new() -> Result<Self, InitError> {
fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {
let mut magic = Vec::from(b"FnTable:".as_ref());
magic.extend(suffix);
let mut error = sys::EVRInitError_EVRInitError_VRInitError_None;
let result = unsafe { sys::VR_GetGenericInterface(magic.as_ptr() as *const i8, &mut error) };
if error != sys::EVRInitError_EVRInitError_VRInitError_None {
return Err(InitError(sys::EVRInitError_EVRInitError_VRInitError_Init_InterfaceNotFound));
}
Ok(result as *const T)
}

Ok(Context {
system: load(sys::IVRSystem_Version)?,
compositor: load(sys::IVRCompositor_Version)?,
})
}

pub fn system(&self) -> System { unsafe { System(&*self.system) } }
pub fn compositor(&self) -> Compositor { unsafe { Compositor(&*self.compositor) } }
pub fn system(&self) -> Result<System, InitError> { load(sys::IVRSystem_Version).map(|x| unsafe { System(&*x) }) }
pub fn compositor(&self) -> Result<Compositor, InitError> { load(sys::IVRCompositor_Version).map(|x| unsafe { Compositor(&*x) }) }
pub fn render_models(&self) -> Result<RenderModels, InitError> { load(sys::IVRRenderModels_Version).map(|x| unsafe { RenderModels(&*x) }) }
}

impl Drop for Context {
@@ -100,6 +91,7 @@ pub enum ApplicationType {
Bootstrapper = sys::EVRApplicationType_EVRApplicationType_VRApplication_Bootstrapper as isize,
}

#[derive(Copy, Clone)]
pub struct InitError(sys::EVRInitError);

impl fmt::Debug for InitError {
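A side note on the `load` helper above: `VR_GetGenericInterface` is handed the versioned interface name prefixed with `FnTable:`, which asks OpenVR for a C-callable function table rather than a C++ interface pointer. A minimal sketch of the string it builds; the version suffix here is made up for illustration, the real one comes from constants like `sys::IVRSystem_Version`:

```rust
fn main() {
    // Sketch of the lookup string `load` constructs; "IVRSystem_019" is illustrative only.
    let mut magic = Vec::from(b"FnTable:".as_ref());
    magic.extend(b"IVRSystem_019".as_ref());
    assert_eq!(magic, b"FnTable:IVRSystem_019".to_vec());
}
```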
@@ -1,135 +1,3 @@
//! The `System` interface provides access to display configuration information, tracking data, controller state,
//! events, and device properties. It is the main interface of OpenVR.

use std::mem;

use openvr_sys as sys;

use super::*;

impl<'a> System<'a> {
/// Provides the game with the minimum size that it should use for its offscreen render target to minimize pixel
/// stretching. This size is matched with the projection matrix and distortion function and will change from display
/// to display depending on resolution, distortion, and field of view.
pub fn recommended_render_target_size(&self) -> (u32, u32) {
unsafe {
let mut result: (u32, u32) = mem::uninitialized();
(self.0.GetRecommendedRenderTargetSize.unwrap())(&mut result.0, &mut result.1);
result
}
}

/// Returns the projection matrix to use for the specified eye.
///
/// Clip plane distances are in meters.
pub fn projection_matrix(&self, eye: Eye, near_z: f32, far_z: f32) -> [[f32; 4]; 4] {
unsafe { (self.0.GetProjectionMatrix.unwrap())(eye as sys::EVREye, near_z, far_z) }.m
}

/// Returns the raw project values to use for the specified eye. Most games should use GetProjectionMatrix instead
/// of this method, but sometimes a game needs to do something fancy with its projection and can use these values to
/// compute its own matrix.
pub fn projection_raw(&self, eye: Eye) -> RawProjection {
unsafe {
let mut result: RawProjection = mem::uninitialized();
(self.0.GetProjectionRaw.unwrap())(eye as sys::EVREye, &mut result.left, &mut result.right, &mut result.top, &mut result.bottom);
result
}
}

/// Returns the transform between the view space and eye space. Eye space is the per-eye flavor of view space that
/// provides stereo disparity. Instead of Model * View * Projection the model is Model * View * Eye *
/// Projection. Normally View and Eye will be multiplied together and treated as View in your application.
pub fn eye_to_head_transform(&self, eye: Eye) -> [[f32; 4]; 3] {
unsafe { (self.0.GetEyeToHeadTransform.unwrap())(eye as sys::EVREye) }.m
}

/// Returns the number of elapsed seconds since the last recorded vsync event and the global number of frames that
/// have been rendered. Timing information will come from a vsync timer event in the timer if possible or from the
/// application-reported time if that is not available. If no vsync times are available the function will return
/// None.
pub fn time_since_last_vsync(&self) -> Option<(f32, u64)> {
unsafe {
let mut result: (f32, u64) = mem::uninitialized();
if (self.0.GetTimeSinceLastVsync.unwrap())(&mut result.0, &mut result.1) {
Some(result)
} else {
None
}
}
}

/// Calculates updated poses for all devices.
///
/// The pose that the tracker thinks that the HMD will be in at the specified number of seconds into the
/// future. Pass 0 to get the state at the instant the method is called. Most of the time the application should
/// calculate the time until the photons will be emitted from the display and pass that time into the method.
///
/// This is roughly analogous to the inverse of the view matrix in most applications, though many games will need to
/// do some additional rotation or translation on top of the rotation and translation provided by the head pose.
///
/// Seated experiences should call this method with TrackingUniverseSeated and receive poses relative to the seated
/// zero pose. Standing experiences should call this method with TrackingUniverseStanding and receive poses relative
/// to the chaperone soft bounds. TrackingUniverseRawAndUncalibrated should probably not be used unless the
/// application is the chaperone calibration tool itself, but will provide poses relative to the hardware-specific
/// coordinate system in the driver.
pub fn device_to_absolute_tracking_pose(&self, origin: TrackingUniverseOrigin, predicted_seconds_to_photons_from_now: f32) -> TrackedDevicePoses {
unsafe {
let mut result: TrackedDevicePoses = mem::uninitialized();
(self.0.GetDeviceToAbsoluteTrackingPose.unwrap())(origin as sys::ETrackingUniverseOrigin, predicted_seconds_to_photons_from_now,
result.data.as_mut().as_mut_ptr() as *mut _, result.data.len() as u32);
result
}
}

pub fn tracked_device_class(&self, index: TrackedDeviceIndex) -> TrackedDeviceClass {
use self::TrackedDeviceClass::*;
match unsafe { (self.0.GetTrackedDeviceClass.unwrap())(index) } {
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_Invalid => Invalid,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_HMD => HMD,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_Controller => Controller,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_GenericTracker => GenericTracker,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_TrackingReference => TrackingReference,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_DisplayRedirect => DisplayRedirect,
_ => Invalid,
}
}

pub fn is_tracked_device_connected(&self, index: TrackedDeviceIndex) -> bool {
unsafe { (self.0.IsTrackedDeviceConnected.unwrap())(index) }
}

pub fn poll_next_event_with_pose(&self, origin: TrackingUniverseOrigin) -> Option<(EventInfo, TrackedDevicePose)> {
let mut event = unsafe { mem::uninitialized() };
let mut pose = unsafe { mem::uninitialized() };
if unsafe { self.0.PollNextEventWithPose.unwrap()(origin as sys::ETrackingUniverseOrigin,
&mut event, mem::size_of_val(&event) as u32,
&mut pose as *mut _ as *mut _) }
{
Some((EventInfo {
tracked_device_index: event.trackedDeviceIndex,
age: event.eventAgeSeconds,
event: Event::from_sys(event.eventType, &event.data)
}, pose))
} else {
None
}
}
}

/// Values represent the tangents of the half-angles from the center view axis
#[derive(Debug, Copy, Clone)]
pub struct RawProjection {
/// tangent of the half-angle from center axis to the left clipping plane
pub left: f32,
/// tangent of the half-angle from center axis to the right clipping plane
pub right: f32,
/// tangent of the half-angle from center axis to the top clipping plane
pub top: f32,
/// tangent of the half-angle from center axis to the bottom clipping plane
pub bottom: f32,
}

pub struct EventInfo {
/// The tracked device index of the event. For events that aren't connected to a tracked device this is
/// k_unTrackedDeviceIndexInvalid
@@ -142,162 +10,170 @@ pub struct EventInfo {
pub event: Event,
}

pub trait FromEventData {
impl From<sys::VREvent_t> for EventInfo {
fn from(x: sys::VREvent_t) -> Self {
EventInfo {
tracked_device_index: x.trackedDeviceIndex,
age: x.eventAgeSeconds,
event: Event::from_sys(x.eventType, &x.data)
}
}
}

trait FromEventData {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self;
}

pub mod event {
use super::*;
use super::*;

#[derive(Debug, Copy, Clone)]
/// Controller button events
pub struct Controller {
pub button: u32,
#[derive(Debug, Copy, Clone)]
/// Controller button events
pub struct Controller {
pub button: u32,
}

impl FromEventData for Controller {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.controller.as_ref();
Controller { button: x.button }
}
}

impl FromEventData for Controller {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.controller.as_ref();
Controller { button: x.button }
}
#[derive(Debug, Copy, Clone)]
/// Simulated mouse events in overlay space
pub struct Mouse {
/// Absolute position in texcoords, with the origin at the bottom left.
pub position: (f32, f32),
/// Bitfield
pub button: u32,
}

impl FromEventData for Mouse {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.mouse.as_ref();
Mouse { position: (x.x, x.y), button: x.button }
}
}

#[derive(Debug, Copy, Clone)]
/// Simulated mouse events in overlay space
pub struct Mouse {
/// Absolute position in texcoords, with the origin at the bottom left.
pub position: (f32, f32),
/// Bitfield
pub button: u32,

#[derive(Debug, Copy, Clone)]
/// Simulated mouse wheel scroll in overlay space
///
/// Coordinates are fraction of the touchpad traversed since last scroll event.
pub struct Scroll {
pub delta: (f32, f32),
pub repeat_count: u32,
}

impl FromEventData for Scroll {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.scroll.as_ref();
Scroll { delta: (x.xdelta, x.ydelta), repeat_count: x.repeatCount }
}
}

impl FromEventData for Mouse {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.mouse.as_ref();
Mouse { position: (x.x, x.y), button: x.button }
}
#[derive(Debug, Copy, Clone)]
/// When in mouse input mode you can receive data from the touchpad, these events are only sent if the user's finger
/// is on the touchpad (or just released from it)
pub struct TouchPadMove {
/// if the user's finger is detected on the touch pad
pub finger_down: bool,
/// How long the finger has been down in seconds
pub seconds_finger_down: f32,
/// Starting finger position (so you can do some basic swipe stuff)
pub first: (f32, f32),
/// This is the raw sampled coordinate without deadzoning
pub raw: (f32, f32),
}

impl FromEventData for TouchPadMove {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.touchPadMove.as_ref();
TouchPadMove { finger_down: x.bFingerDown, seconds_finger_down: x.flSecondsFingerDown,
first: (x.fValueXFirst, x.fValueYFirst),
raw: (x.fValueXRaw, x.fValueYRaw) }
}
}

#[derive(Debug, Copy, Clone)]
/// notification related events. Details will still change at this point
pub struct Notification {
pub user_value: u64,
pub notification_id: u32,
}

#[derive(Debug, Copy, Clone)]
/// Simulated mouse wheel scroll in overlay space
///
/// Coordinates are fraction of the touchpad traversed since last scroll event.
pub struct Scroll {
pub delta: (f32, f32),
pub repeat_count: u32,
#[derive(Debug, Copy, Clone)]
pub struct Process {
pub pid: u32,
pub old_pid: u32,
pub forced: bool,
}

impl FromEventData for Process {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.process.as_ref();
Process { pid: x.pid, old_pid: x.oldPid, forced: x.bForced }
}
}

impl FromEventData for Scroll {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.scroll.as_ref();
Scroll { delta: (x.xdelta, x.ydelta), repeat_count: x.repeatCount }
}
#[derive(Debug, Copy, Clone)]
pub struct Overlay {
pub overlay_handle: u64,
}

impl FromEventData for Overlay {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.overlay.as_ref();
Overlay { overlay_handle: x.overlayHandle }
}
}

#[derive(Debug, Copy, Clone)]
/// When in mouse input mode you can receive data from the touchpad, these events are only sent if the user's finger
/// is on the touchpad (or just released from it)
pub struct TouchPadMove {
/// if the user's finger is detected on the touch pad
pub finger_down: bool,
/// How long the finger has been down in seconds
pub seconds_finger_down: f32,
/// Starting finger position (so you can do some basic swipe stuff)
pub first: (f32, f32),
/// This is the raw sampled coordinate without deadzoning
pub raw: (f32, f32),
#[derive(Debug, Copy, Clone)]
pub struct Status {
pub status_state: u32,
}

#[derive(Debug, Copy, Clone)]
pub struct Keyboard {
pub new_input: [u8; 8],
pub user_value: u64,
}

impl FromEventData for Keyboard {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = &*(x.keyboard.as_ref() as *const _ as *const sys::VREvent_Keyboard_t_real);
Keyboard { new_input: *(x.cNewInput.as_ptr() as *const _), user_value: x.uUserValue }
}
}

impl FromEventData for TouchPadMove {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.touchPadMove.as_ref();
TouchPadMove { finger_down: x.bFingerDown, seconds_finger_down: x.flSecondsFingerDown,
first: (x.fValueXFirst, x.fValueYFirst),
raw: (x.fValueXRaw, x.fValueYRaw) }
}
}
#[derive(Debug, Copy, Clone)]
pub struct Ipd {
pub ipd_meters: f32,
}

#[derive(Debug, Copy, Clone)]
/// notification related events. Details will still change at this point
pub struct Notification {
pub user_value: u64,
pub notification_id: u32,
}
#[derive(Debug, Copy, Clone)]
pub struct Chaperone {
pub previous_universe: u64,
pub current_universe: u64,
}

#[derive(Debug, Copy, Clone)]
pub struct Process {
pub pid: u32,
pub old_pid: u32,
pub forced: bool,
}
#[derive(Debug, Copy, Clone)]
pub struct Property {
pub container: PropertyContainerHandle,
pub property: TrackedDeviceProperty,
}

impl FromEventData for Process {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.process.as_ref();
Process { pid: x.pid, old_pid: x.oldPid, forced: x.bForced }
}
}

#[derive(Debug, Copy, Clone)]
pub struct Overlay {
pub overlay_handle: u64,
}

impl FromEventData for Overlay {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = x.overlay.as_ref();
Overlay { overlay_handle: x.overlayHandle }
}
}

#[derive(Debug, Copy, Clone)]
pub struct Status {
pub status_state: u32,
}

#[derive(Debug, Copy, Clone)]
pub struct Keyboard {
pub new_input: [u8; 8],
pub user_value: u64,
}

impl FromEventData for Keyboard {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x = &*(x.keyboard.as_ref() as *const _ as *const sys::VREvent_Keyboard_t_real);
Keyboard { new_input: *(x.cNewInput.as_ptr() as *const _), user_value: x.uUserValue }
}
}

#[derive(Debug, Copy, Clone)]
pub struct Ipd {
pub ipd_meters: f32,
}

#[derive(Debug, Copy, Clone)]
pub struct Chaperone {
pub previous_universe: u64,
pub current_universe: u64,
}

#[derive(Debug, Copy, Clone)]
pub struct Property {
pub container: PropertyContainerHandle,
pub property: TrackedDeviceProperty,
}

impl FromEventData for Property {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x: &sys::VREvent_Property_t = &*(x as *const _ as *const _); // Field is missing from union
Property {
container: x.container,
property: x.prop,
}
impl FromEventData for Property {
unsafe fn from_event_data(x: &sys::VREvent_Data_t) -> Self {
let x: &sys::VREvent_Property_t = &*(x as *const _ as *const _); // Field is missing from union
Property {
container: x.container,
property: x.prop,
}
}
}

#[allow(non_camel_case_types)]
#[allow(non_camel_case_types, deprecated)]
#[derive(Debug, Copy, Clone)]
pub enum Event {
TrackedDeviceActivated,
@@ -311,35 +187,35 @@ pub enum Event {
TrackedDeviceRoleChanged,
WatchdogWakeUpRequested,
LensDistortionChanged,
PropertyChanged(event::Property),
PropertyChanged(Property),

ButtonPress(event::Controller),
ButtonUnpress(event::Controller),
ButtonTouch(event::Controller),
ButtonUntouch(event::Controller),
ButtonPress(Controller),
ButtonUnpress(Controller),
ButtonTouch(Controller),
ButtonUntouch(Controller),

MouseMove(event::Mouse),
MouseButtonDown(event::Mouse),
MouseButtonUp(event::Mouse),
FocusEnter(event::Overlay),
FocusLeave(event::Overlay),
Scroll(event::Scroll),
TouchPadMove(event::TouchPadMove),
MouseMove(Mouse),
MouseButtonDown(Mouse),
MouseButtonUp(Mouse),
FocusEnter(Overlay),
FocusLeave(Overlay),
Scroll(Scroll),
TouchPadMove(TouchPadMove),
/// global event
OverlayFocusChanged(event::Overlay),
OverlayFocusChanged(Overlay),

#[deprecated]
InputFocusCaptured(event::Process),
InputFocusCaptured(Process),
#[deprecated]
InputFocusReleased(event::Process),
SceneFocusLost(event::Process),
SceneFocusGained(event::Process),
InputFocusReleased(Process),
SceneFocusLost(Process),
SceneFocusGained(Process),
/// The app actually drawing the scene changed (usually to or from the compositor)
SceneApplicationChanged(event::Process),
SceneApplicationChanged(Process),
/// New app got access to draw the scene
SceneFocusChanged(event::Process),
InputFocusChanged(event::Process),
SceneApplicationSecondaryRenderingStarted(event::Process),
SceneFocusChanged(Process),
InputFocusChanged(Process),
SceneApplicationSecondaryRenderingStarted(Process),

/// Sent to the scene application to request hiding render models temporarily
HideRenderModels,
@@ -395,10 +271,10 @@ pub enum Event {
Notification_BeginInteraction,
Notification_Destroyed,

Quit(event::Process),
ProcessQuit(event::Process),
QuitAborted_UserPrompt(event::Process),
QuitAcknowledged(event::Process),
Quit(Process),
ProcessQuit(Process),
QuitAborted_UserPrompt(Process),
QuitAcknowledged(Process),
/// The driver has requested that SteamVR shut down
DriverRequestedQuit,

@@ -425,7 +301,7 @@ pub enum Event {
FirmwareUpdateFinished,

KeyboardClosed,
KeyboardCharInput(event::Keyboard),
KeyboardCharInput(Keyboard),
/// Sent when DONE button clicked on keyboard
KeyboardDone,

@@ -584,4 +460,4 @@ impl Event {
}
}

pub type PropertyContainerHandle = sys::PropertyContainerHandle_t;
pub use sys::PropertyContainerHandle_t as PropertyContainerHandle;
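To show how the reorganized event plumbing above is meant to be consumed, a hedged sketch of an event pump. `poll_next_event_with_pose`, `EventInfo`, and the `Event` variants are taken from this diff; the module paths, the loop structure, and the `TrackingUniverseOrigin::Standing` variant name are assumptions.

```rust
// Sketch: drain the event queue once per frame. Names outside the diff are illustrative.
use openvr::system::event::Event;

fn pump_events(system: &openvr::System<'_>) {
    while let Some((info, _pose)) =
        system.poll_next_event_with_pose(openvr::TrackingUniverseOrigin::Standing)
    {
        match info.event {
            Event::ButtonPress(controller) => {
                println!("device {} pressed button {}", info.tracked_device_index, controller.button);
            }
            Event::Quit(_) => {
                // SteamVR asked us to exit; begin shutting the application down.
                break;
            }
            _ => {}
        }
    }
}
```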
src/system/mod.rs (new file, 131 lines)
@@ -0,0 +1,131 @@
//! The `System` interface provides access to display configuration information, tracking data, controller state,
//! events, and device properties. It is the main interface of OpenVR.

use std::mem;

use openvr_sys as sys;

pub mod event;

use super::*;

pub use self::event::{Event, EventInfo};

impl<'a> System<'a> {
/// Provides the game with the minimum size that it should use for its offscreen render target to minimize pixel
/// stretching. This size is matched with the projection matrix and distortion function and will change from display
/// to display depending on resolution, distortion, and field of view.
pub fn recommended_render_target_size(&self) -> (u32, u32) {
unsafe {
let mut result: (u32, u32) = mem::uninitialized();
(self.0.GetRecommendedRenderTargetSize.unwrap())(&mut result.0, &mut result.1);
result
}
}

/// Returns the projection matrix to use for the specified eye.
///
/// Clip plane distances are in meters.
pub fn projection_matrix(&self, eye: Eye, near_z: f32, far_z: f32) -> [[f32; 4]; 4] {
unsafe { (self.0.GetProjectionMatrix.unwrap())(eye as sys::EVREye, near_z, far_z) }.m
}

/// Returns the raw project values to use for the specified eye. Most games should use GetProjectionMatrix instead
/// of this method, but sometimes a game needs to do something fancy with its projection and can use these values to
/// compute its own matrix.
pub fn projection_raw(&self, eye: Eye) -> RawProjection {
unsafe {
let mut result: RawProjection = mem::uninitialized();
(self.0.GetProjectionRaw.unwrap())(eye as sys::EVREye, &mut result.left, &mut result.right, &mut result.top, &mut result.bottom);
result
}
}

/// Returns the transform between the view space and eye space. Eye space is the per-eye flavor of view space that
/// provides stereo disparity. Instead of Model * View * Projection the model is Model * View * Eye *
/// Projection. Normally View and Eye will be multiplied together and treated as View in your application.
pub fn eye_to_head_transform(&self, eye: Eye) -> [[f32; 4]; 3] {
unsafe { (self.0.GetEyeToHeadTransform.unwrap())(eye as sys::EVREye) }.m
}

/// Returns the number of elapsed seconds since the last recorded vsync event and the global number of frames that
/// have been rendered. Timing information will come from a vsync timer event in the timer if possible or from the
/// application-reported time if that is not available. If no vsync times are available the function will return
/// None.
pub fn time_since_last_vsync(&self) -> Option<(f32, u64)> {
unsafe {
let mut result: (f32, u64) = mem::uninitialized();
if (self.0.GetTimeSinceLastVsync.unwrap())(&mut result.0, &mut result.1) {
Some(result)
} else {
None
}
}
}

/// Calculates updated poses for all devices.
///
/// The pose that the tracker thinks that the HMD will be in at the specified number of seconds into the
/// future. Pass 0 to get the state at the instant the method is called. Most of the time the application should
/// calculate the time until the photons will be emitted from the display and pass that time into the method.
///
/// This is roughly analogous to the inverse of the view matrix in most applications, though many games will need to
/// do some additional rotation or translation on top of the rotation and translation provided by the head pose.
///
/// Seated experiences should call this method with TrackingUniverseSeated and receive poses relative to the seated
/// zero pose. Standing experiences should call this method with TrackingUniverseStanding and receive poses relative
/// to the chaperone soft bounds. TrackingUniverseRawAndUncalibrated should probably not be used unless the
/// application is the chaperone calibration tool itself, but will provide poses relative to the hardware-specific
/// coordinate system in the driver.
pub fn device_to_absolute_tracking_pose(&self, origin: TrackingUniverseOrigin, predicted_seconds_to_photons_from_now: f32) -> TrackedDevicePoses {
unsafe {
let mut result: TrackedDevicePoses = mem::uninitialized();
(self.0.GetDeviceToAbsoluteTrackingPose.unwrap())(origin as sys::ETrackingUniverseOrigin, predicted_seconds_to_photons_from_now,
result.data.as_mut().as_mut_ptr() as *mut _, result.data.len() as u32);
result
}
}

pub fn tracked_device_class(&self, index: TrackedDeviceIndex) -> TrackedDeviceClass {
use self::TrackedDeviceClass::*;
match unsafe { (self.0.GetTrackedDeviceClass.unwrap())(index) } {
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_Invalid => Invalid,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_HMD => HMD,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_Controller => Controller,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_GenericTracker => GenericTracker,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_TrackingReference => TrackingReference,
sys::ETrackedDeviceClass_ETrackedDeviceClass_TrackedDeviceClass_DisplayRedirect => DisplayRedirect,
_ => Invalid,
}
}

pub fn is_tracked_device_connected(&self, index: TrackedDeviceIndex) -> bool {
unsafe { (self.0.IsTrackedDeviceConnected.unwrap())(index) }
}

pub fn poll_next_event_with_pose(&self, origin: TrackingUniverseOrigin) -> Option<(EventInfo, TrackedDevicePose)> {
let mut event = unsafe { mem::uninitialized() };
let mut pose = unsafe { mem::uninitialized() };
if unsafe { self.0.PollNextEventWithPose.unwrap()(origin as sys::ETrackingUniverseOrigin,
&mut event, mem::size_of_val(&event) as u32,
&mut pose as *mut _ as *mut _) }
{
Some((event.into(), pose))
} else {
None
}
}
}

/// Values represent the tangents of the half-angles from the center view axis
#[derive(Debug, Copy, Clone)]
pub struct RawProjection {
/// tangent of the half-angle from center axis to the left clipping plane
pub left: f32,
/// tangent of the half-angle from center axis to the right clipping plane
pub right: f32,
/// tangent of the half-angle from center axis to the top clipping plane
pub top: f32,
/// tangent of the half-angle from center axis to the bottom clipping plane
pub bottom: f32,
}
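Since `RawProjection` above only hands back the four half-angle tangents, here is a hedged sketch of turning them into a 4x4 projection matrix. It follows the ComposeProjection example from the OpenVR documentation (row-major, camera looking down -Z, depth mapped to [0, 1] after the perspective divide); verify the depth convention against your graphics API before relying on it.

```rust
// Sketch: build an off-axis projection from System::projection_raw output.
fn compose_projection(raw: &RawProjection, near: f32, far: f32) -> [[f32; 4]; 4] {
    let idx = 1.0 / (raw.right - raw.left);
    let idy = 1.0 / (raw.bottom - raw.top);
    let idz = 1.0 / (far - near);
    let sx = raw.right + raw.left;
    let sy = raw.bottom + raw.top;
    [
        [2.0 * idx, 0.0,       sx * idx,   0.0],
        [0.0,       2.0 * idy, sy * idy,   0.0],
        [0.0,       0.0,       -far * idz, -far * near * idz],
        [0.0,       0.0,       -1.0,       0.0],
    ]
}
```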