Finishes camera subsystem

This commit is contained in:
Rene Eichhorn
2016-06-11 17:55:24 +02:00
parent 371b34a548
commit a9d257738f
4 changed files with 263 additions and 17 deletions

View File

@@ -1,26 +1,61 @@
extern crate openvr;
pub fn main () {
// init vr system
let system = match openvr::init() {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to create IVRSystem subsystem {:?}", err);
return;
}
};
// init camera subsystem
let camera = match openvr::subsystems::tracked_camera() {
Ok(ivr) => ivr,
Err(err) => {
println!("Failed to create IVRTrackedCamera subsystem {:?}", err);
return;
}
};
// look for tracked devices with a camera
let mut camera_device = None;
for device in system.tracked_devices(0.0).connected_iter() {
if camera.has_camera(&device).unwrap_or(false) {
println!("Tracked Device with camera found, ID: {}", device.index);
println!("\t{:?}", device.device_class());
println!("\t{:?}", camera.frame_size(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted));
println!("\t{:?}", camera.intrinisics(&device, openvr::tracked_camera::CameraFrameType::MaximumUndistorted));
camera_device = Some(device.clone());
}
}
// make sure camera is available
if camera_device.is_none() {
println!("No tracked device with camera found. Exiting..");
openvr::shutdown();
return;
}
// create stream
let stream = camera.stream(&camera_device.unwrap()).unwrap_or_else(|err| {
println!("Could not start stream to camera: {}", err.message());
openvr::shutdown();
panic!("");
});
// read a frame from the camera stream
let frame = stream.read(openvr::tracked_camera::CameraFrameType::MaximumUndistorted).unwrap_or_else(|err| {
println!("Could not read from camera stream: {}", err.message());
openvr::shutdown();
panic!("");
});
println!("Frame Data received! {:?}", frame);
openvr::shutdown();
}
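
For context, a minimal sketch of consuming the returned frame. It assumes the framebuffer is tightly packed in row-major order at bytes_per_pixel bytes per pixel, which is what the CameraFrameHeader fields suggest; pixel_offset is a hypothetical helper, not part of this crate:

// minimal sketch, assuming a tightly packed row-major framebuffer (hypothetical helper)
fn pixel_offset(width: u32, bytes_per_pixel: u32, x: u32, y: u32) -> usize {
((y * width + x) * bytes_per_pixel) as usize
}
// usage: let first_byte = frame.framebuffer[pixel_offset(frame.header.width, frame.header.bytes_per_pixel, 10, 20)];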

View File

@@ -17,6 +17,7 @@ pub use system::IVRSystem;
pub use extended_display::IVRExtendedDisplay;
pub use compositor::IVRCompositor;
pub use render_models::IVRRenderModels;
pub use tracked_camera::IVRTrackedCamera;
pub use subsystems::*;
pub use error::*;

View File

@@ -2,9 +2,129 @@ use openvr_sys;
use tracking::*;
use error::*;
use subsystems::*;
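// thin wrapper around the raw IVRTrackedCamera function-table pointer provided by openvr_sys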
pub struct IVRTrackedCamera(pub *const ());
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameSize {
pub width: u32,
pub height: u32,
pub buffer: u32
}
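// safe mirror of openvr_sys::EVRTrackedCameraFrameType; converted back with to_raw() below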
#[derive(Debug, Copy, Clone)]
pub enum CameraFrameType {
Distorted,
Undistorted,
MaximumUndistorted,
MaximumFrameTypes
}
#[derive(Debug, Copy, Clone)]
pub struct CameraIntrinsics {
pub focal_length: [f32; 2],
pub center: [f32; 2]
}
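// per-frame metadata copied out of openvr_sys::CameraVideoStreamFrameHeader_t in CameraStream::read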
#[derive(Debug, Copy, Clone)]
pub struct CameraFrameHeader {
pub width: u32,
pub height: u32,
pub bytes_per_pixel: u32,
pub frame_sequence: u32,
pub pose: TrackedDevicePose
}
#[derive(Debug)]
pub struct CameraFrame {
pub framebuffer: Vec<u8>,
pub header: CameraFrameHeader
}
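// an open video stream for one tracked device; the service handle is released again on Drop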
pub struct CameraStream {
pub handle: u64,
pub owner: TrackedDevicePose
}
impl Drop for CameraStream {
fn drop(&mut self) {
unsafe {
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.ReleaseVideoStreamingService.unwrap()(self.handle));
if error.is_err() {
println!("Failed to drop camera stream! Possible memory leak! {}", error.message());
}
}
}
}
impl CameraStream {
/// reads current camera frame
pub fn read(&self, ctype: CameraFrameType) -> Result<CameraFrame, Error<openvr_sys::EVRTrackedCameraError>> {
use std::mem;
use std;
unsafe {
// get subsystems
let cam = *{ tracked_camera().unwrap().0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let size = tracked_camera().unwrap().frame_size(&self.owner, ctype).unwrap();
// create a raw buffer that OpenVR can store its data into
let mut buffer = Vec::<u8>::with_capacity(size.buffer as usize);
let raw_buffer = buffer.as_mut_ptr();
mem::forget(buffer);
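// ownership of the allocation is handed to OpenVR for the duration of the call
// and reclaimed below via Vec::from_raw_parts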
// create header
let mut header = openvr_sys::CameraVideoStreamFrameHeader_t::default();
let error = Error::from_raw(
cam.GetVideoStreamFrameBuffer.unwrap()(
self.handle,
ctype.to_raw(),
raw_buffer as *mut std::os::raw::c_void,
size.buffer,
&mut header,
mem::size_of::<openvr_sys::CameraVideoStreamFrameHeader_t>() as u32
));
if error.is_ok() {
// bring the framebuffer back under Rust's control
let buffer = Vec::from_raw_parts(raw_buffer, size.buffer as usize, size.buffer as usize);
return Ok(CameraFrame {
framebuffer: buffer,
header: CameraFrameHeader {
width: header.nWidth,
height: header.nHeight,
bytes_per_pixel: header.nBytesPerPixel,
frame_sequence: header.nFrameSequence,
pose: TrackedDevicePose::from_raw(self.owner.index, header.standingTrackedDevicePose)
}
});
} else {
// reclaim the forgotten allocation so the buffer is freed on the error path too
drop(Vec::from_raw_parts(raw_buffer, 0, size.buffer as usize));
return Err(error);
}
}
}
}
impl CameraFrameType {
pub fn to_raw(&self) -> openvr_sys::EVRTrackedCameraFrameType {
use openvr_sys::EVRTrackedCameraFrameType::*;
match self {
&CameraFrameType::Distorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Distorted,
&CameraFrameType::Undistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Undistorted,
&CameraFrameType::MaximumUndistorted => EVRTrackedCameraFrameType_VRTrackedCameraFrameType_MaximumUndistorted,
&CameraFrameType::MaximumFrameTypes => EVRTrackedCameraFrameType_MAX_CAMERA_FRAME_TYPES
}
}
}
impl IVRTrackedCamera {
pub unsafe fn from_raw(ptr: *const ()) -> Self {
IVRTrackedCamera(ptr)
@@ -26,4 +146,79 @@ impl IVRTrackedCamera {
}
}
}
/// gets frame buffer information of camera
pub fn frame_size(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraFrameSize, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut result = CameraFrameSize {
width: 0,
height: 0,
buffer: 0,
};
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraFrameSize.unwrap()(device.index as u32,
ctype.to_raw(),
&mut result.width,
&mut result.height,
&mut result.buffer));
if error.is_ok() {
return Ok(result);
} else {
return Err(error);
}
}
}
/// gets the camera intrinsics
pub fn intrinsics(&self, device: &TrackedDevicePose, ctype: CameraFrameType)
-> Result<CameraIntrinsics, Error<openvr_sys::EVRTrackedCameraError>>
{
unsafe {
let mut focal = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let mut center = openvr_sys::HmdVector2_t { v: [0.0, 0.0] };
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let error = Error::from_raw(
cam.GetCameraIntrinisics.unwrap()(device.index as u32,
ctype.to_raw(),
&mut focal,
&mut center));
if error.is_ok() {
return Ok(CameraIntrinsics {
focal_length: focal.v,
center: center.v
});
} else {
return Err(error);
}
}
}
/// acquires a stream to the given camera device
pub fn stream(&self, device: &TrackedDevicePose) -> Result<CameraStream, Error<openvr_sys::EVRTrackedCameraError>> {
unsafe {
let cam = *{ self.0 as *mut openvr_sys::VR_IVRTrackedCamera_FnTable };
let mut handle = 0u64;
let error = Error::from_raw(
cam.AcquireVideoStreamingService.unwrap()(device.index as u32, &mut handle));
if error.is_ok() {
return Ok(CameraStream {
handle: handle,
owner: *device
});
} else {
return Err(error);
}
}
}
}

View File

@@ -104,6 +104,17 @@ pub struct TrackedDevicePose {
}
impl TrackedDevicePose {
pub fn from_raw(i: usize, d: openvr_sys::TrackedDevicePose_t) -> Self {
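// build the safe wrapper from the raw FFI pose; the integer "bools" become real bools via > 0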
TrackedDevicePose {
index: i,
is_connected: d.bDeviceIsConnected > 0,
is_valid: d.bPoseIsValid > 0,
to_device: d.mDeviceToAbsoluteTracking.m,
velocity: d.vVelocity.v,
angular_velocity: d.vAngularVelocity.v,
}
}
/// returns the device class of the tracked object
pub fn device_class(&self) -> TrackedDeviceClass {
unsafe {
@@ -173,6 +184,10 @@ impl<'a> Iterator for TrackedDevicePosesIterator<'a> {
}
let res = &self.target.poses[self.index];
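// stop at the first pose that is not valid and connected (assumes connected devices come first)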
if !res.is_valid || !res.is_connected {
return None;
}
self.index += 1;
Some(res)