diff --git a/examples/camera.rs b/examples/camera.rs index e7ae3ee..e6dbb1f 100644 --- a/examples/camera.rs +++ b/examples/camera.rs @@ -1,26 +1,61 @@ extern crate openvr; pub fn main () { - // init vr system - let system = match openvr::init() { - Ok(ivr) => ivr, - Err(err) => { - println!("Failed to create IVRSystem subsystem {:?}", err); + { + // init vr system + let system = match openvr::init() { + Ok(ivr) => ivr, + Err(err) => { + println!("Failed to create IVRSystem subsystem {:?}", err); + return; + } + }; + + // init camera subsystem + let camera = match openvr::subsystems::tracked_camera() { + Ok(ivr) => ivr, + Err(err) => { + println!("Failed to create IVRTrackedCamera subsystem {:?}", err); + return; + } + }; + + // look for tracked devices with a camera + let mut camera_device = None; + for device in system.tracked_devices(0.0).connected_iter() { + if camera.has_camera(&device).unwrap_or(false) { + println!("Tracked Device with camera found, ID: {}", device.index); + println!("\t{:?}", device.device_class()); + println!("\t{:?}", camera.frame_size(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted)); + println!("\t{:?}", camera.intrinisics(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted)); + + camera_device = Some(device.clone()); + } + } + + // make sure camera is available + if camera_device.is_none() { + println!("No tracked device with camera found. 
Exiting.."); + + openvr::shutdown(); return; } - }; - let camera = match openvr::subsystems::tracked_camera() { - Ok(ivr) => ivr, - Err(err) => { - println!("Failed to create IVRTrackedCamera subsystem {:?}", err); - return; - } - }; + // create stream + let stream = camera.stream(&camera_device.unwrap()).unwrap_or_else(|err| { + println!("Could not start stream to camera: {}", err.message()); + openvr::shutdown(); + panic!(""); + }); - for device in system.tracked_devices(0.0).connected_iter() { - println!("Device found: {}", device.index); - println!("\t{:?}", device.device_class()); - println!("\t{:?}", camera.has_camera(&device)); + let frame = stream.read(openvr::tracked_camera::CameraFrameType::MaximumUndistorted).unwrap_or_else(|err| { + println!("Could not read from camera stream: {}", err.message()); + openvr::shutdown(); + panic!(""); + }); + + println!("Frame Data received! {:?}", frame); } + + openvr::shutdown(); } diff --git a/src/lib.rs b/src/lib.rs index 856d148..944bf03 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,6 +17,7 @@ pub use system::IVRSystem; pub use extended_display::IVRExtendedDisplay; pub use compositor::IVRCompositor; pub use render_models::IVRRenderModels; +pub use tracked_camera::IVRTrackedCamera; pub use subsystems::*; pub use error::*; diff --git a/src/tracked_camera.rs b/src/tracked_camera.rs index 5834978..deae253 100644 --- a/src/tracked_camera.rs +++ b/src/tracked_camera.rs @@ -2,9 +2,129 @@ use openvr_sys; use tracking::*; use error::*; +use subsystems::*; pub struct IVRTrackedCamera(pub *const ()); +#[derive(Debug, Copy, Clone)] +pub struct CameraFrameSize { + pub width: u32, + pub height: u32, + pub buffer: u32 +} + +#[derive(Debug, Copy, Clone)] +pub enum CameraFrameType { + Distorted, + Undistorted, + MaximumUndistorted, + MaximumFrameTypes +} + +#[derive(Debug, Copy, Clone)] +pub struct CameraIntriniscs { + pub focal_length: [f32; 2], + pub center: [f32; 2] +} + +#[derive(Debug, Copy, Clone)] +pub struct 
CameraFrameHeader { + pub width: u32, + pub height: u32, + pub bytes_per_pixel: u32, + pub frame_sequence: u32, + pub pose: TrackedDevicePose +} + +#[derive(Debug)] +pub struct CameraFrame { + pub framebuffer: Vec, + pub header: CameraFrameHeader +} + +pub struct CameraStream { + pub handle: u64, + pub owner: TrackedDevicePose +} + +impl Drop for CameraStream { + fn drop(&mut self) { + unsafe { + let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable }; + + let error = Error::from_raw( + cam.ReleaseVideoStreamingService.unwrap()(self.handle)); + + if error.is_err() { + println!("Failed to drop camera stream! Possible memory leak! {}", error.message()); + } + } + } +} + +impl CameraStream { + /// reads current camera frame + pub fn read(&self, ctype: CameraFrameType) -> Result> { + use std::mem; + use std; + + unsafe { + // get subsystems + let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable }; + let size = tracked_camera().unwrap().frame_size(&self.owner, ctype).unwrap(); + + // create raw buffer where openvr can store its data into + let mut buffer = Vec::::with_capacity(size.buffer as usize); + let mut raw_buffer = buffer.as_mut_ptr(); + mem::forget(buffer); + + // create header + let mut header = openvr_sys::CameraVideoStreamFrameHeader_t::default(); + + let error = Error::from_raw( + cam.GetVideoStreamFrameBuffer.unwrap()( + self.handle, + ctype.to_raw(), + raw_buffer as *mut std::os::raw::c_void, + size.buffer, + &mut header, + mem::size_of::() as u32 + )); + + if error.is_ok() { + // bring framebuffer back into Rust's control + let buffer = Vec::from_raw_parts(raw_buffer, size.buffer as usize, size.buffer as usize); + + return Ok(CameraFrame { + framebuffer: buffer, + header: CameraFrameHeader { + width: header.nWidth, + height: header.nHeight, + bytes_per_pixel: header.nBytesPerPixel, + frame_sequence: header.nFrameSequence, + pose: TrackedDevicePose::from_raw(self.owner.index, 
header.standingTrackedDevicePose) + } + }); + } else { + return Err(error); + } + } + } +} + +impl CameraFrameType { + pub fn to_raw(&self) -> openvr_sys::EVRTrackedCameraFrameType { + use openvr_sys::EVRTrackedCameraFrameType::*; + + match self { + &CameraFrameType::Distorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Distorted, + &CameraFrameType::Undistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Undistorted, + &CameraFrameType::MaximumUndistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_MaximumUndistorted , + &CameraFrameType::MaximumFrameTypes => EVRTrackedCameraFrameType_MAX_CAMERA_FRAME_TYPES + } + } +} + impl IVRTrackedCamera { pub unsafe fn from_raw(ptr: *const ()) -> Self { IVRTrackedCamera(ptr as *mut ()) @@ -26,4 +146,79 @@ impl IVRTrackedCamera { } } } + + /// gets frame buffer information of camera + pub fn frame_size(&self, device: &TrackedDevicePose, ctype: CameraFrameType) + -> Result> + { + unsafe { + let mut result = CameraFrameSize { + width: 0, + height: 0, + buffer: 0, + }; + + let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable }; + + let error = Error::from_raw( + cam.GetCameraFrameSize.unwrap()(device.index as u32, + ctype.to_raw(), + &mut result.width, + &mut result.height, + &mut result.buffer)); + + if error.is_ok() { + return Ok(result); + } else { + return Err(error); + } + } + } + + /// gets camera intrinsics + pub fn intrinisics(&self, device: &TrackedDevicePose, ctype: CameraFrameType) + -> Result> + { + unsafe { + let mut focal = openvr_sys::HmdVector2_t { v: [0.0, 0.0] }; + let mut center = openvr_sys::HmdVector2_t { v: [0.0, 0.0] }; + + let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable }; + + let error = Error::from_raw( + cam.GetCameraIntrinisics.unwrap()(device.index as u32, + ctype.to_raw(), + &mut focal, + &mut center)); + + if error.is_ok() { + return Ok(CameraIntriniscs { + focal_length: focal.v, + center: center.v + }); + } else { + return Err(error); + 
} + } + } + + /// acquires a stream to the given camera device + pub fn stream(&self, device: &TrackedDevicePose) -> Result> { + unsafe { + let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable }; + let mut handle = 0u64; + + let error = Error::from_raw( + cam.AcquireVideoStreamingService.unwrap()(device.index as u32, &mut handle)); + + if error.is_ok() { + return Ok(CameraStream { + handle: handle, + owner: *device + }); + } else { + return Err(error); + } + } + } } diff --git a/src/tracking.rs b/src/tracking.rs index 7f4419c..3c0565a 100644 --- a/src/tracking.rs +++ b/src/tracking.rs @@ -104,6 +104,17 @@ pub struct TrackedDevicePose { } impl TrackedDevicePose { + pub fn from_raw(i: usize, d: openvr_sys::TrackedDevicePose_t) -> Self { + TrackedDevicePose { + index: i, + is_connected: d.bDeviceIsConnected > 0, + is_valid: d.bPoseIsValid > 0, + to_device: d.mDeviceToAbsoluteTracking.m, + velocity: d.vVelocity.v, + angular_velocity: d.vAngularVelocity.v, + } + } + // returns the device class of the tracked object pub fn device_class(&self) -> TrackedDeviceClass { unsafe { @@ -173,6 +184,10 @@ impl<'a> Iterator for TrackedDevicePosesIterator<'a> { } let res = &self.target.poses[self.index]; + if !res.is_valid || !res.is_connected { + return None; + } + self.index += 1; Some(res)