commit 581d26647e
Author: mii443
Date: 2025-06-09 03:03:37 +09:00

4 changed files with 3166 additions and 0 deletions

.gitignore (new file, 1 line)

@@ -0,0 +1 @@
/target

Cargo.lock (generated, new file, 2882 lines)

File diff suppressed because it is too large.

Cargo.toml (new file, 16 lines)

@@ -0,0 +1,16 @@
[package]
name = "vrc-captool"
version = "0.1.0"
edition = "2021"

[dependencies]
gl = "=0.14.0"
glutin = "=0.24"
image = "=0.25.1"
openvr = "=0.6.0"
openvr_sys = "=2.0.3"
windows-capture = "=1.0.36"
winapi = { version = "=0.3", features = ["d3d11", "winerror", "unknwnbase"] }
wio = "=0.2"
winit = "=0.28.0"
rayon = "=1.10.0"

src/main.rs (new file, 267 lines)

@@ -0,0 +1,267 @@
use std::{ffi::c_void, mem::MaybeUninit, ptr};
use image::ImageBuffer;
use openvr::ApplicationType;
use openvr_sys::EVREye_Eye_Left;
use winapi::{
shared::{
dxgitype::DXGI_SAMPLE_DESC,
winerror::{DXGI_ERROR_UNSUPPORTED, SUCCEEDED},
},
um::{
d3d11::{
D3D11CreateDevice, ID3D11Device, ID3D11DeviceContext, ID3D11Resource,
ID3D11ShaderResourceView, ID3D11ShaderResourceViewVtbl, ID3D11Texture2D, ID3D11View,
D3D11_CPU_ACCESS_READ, D3D11_CREATE_DEVICE_BGRA_SUPPORT, D3D11_CREATE_DEVICE_FLAG,
D3D11_MAPPED_SUBRESOURCE, D3D11_MAP_READ, D3D11_SDK_VERSION, D3D11_TEXTURE2D_DESC,
D3D11_USAGE_STAGING,
},
d3dcommon::{D3D_DRIVER_TYPE, D3D_DRIVER_TYPE_HARDWARE, D3D_DRIVER_TYPE_WARP},
unknwnbase::IUnknown,
},
Interface,
};
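
// Local alias for the Win32 HRESULT type (a signed 32-bit status code).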
type HRESULT = i32;
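
// Pack the first three memory-order bytes of a pixel (read as a little-endian u32)
// into the low 24 bits (0x00RRGGBB for an RGBA pixel), discarding the fourth byte.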
fn rgb_endian_flip(r: u32) -> u32 {
let (r, g, b) = (r.to_le_bytes()[0], r.to_le_bytes()[1], r.to_le_bytes()[2]);
u32::from_be_bytes([0, r, g, b])
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
println!("Initializing OpenVR...");
// Initialize OpenVR
let context = unsafe {
openvr::init(ApplicationType::Background)
.map_err(|e| format!("Failed to initialize OpenVR: {:?}", e))?
};
let compositor = context
.compositor()
.map_err(|e| format!("Failed to get compositor: {:?}", e))?;
println!("Creating D3D11 device...");
// Create D3D11 device
let device = create_d3d_device()?;
unsafe {
// Get mirror texture from OpenVR
println!("Getting mirror texture...");
let vr_compositor_fn_table: &openvr_sys::VR_IVRCompositor_FnTable =
std::mem::transmute(compositor);
let mut shader_resource_view: *mut ID3D11ShaderResourceView = ptr::null_mut();
let get_mirror_result = vr_compositor_fn_table
.GetMirrorTextureD3D11
.ok_or("GetMirrorTextureD3D11 function not available")?;
get_mirror_result(
EVREye_Eye_Left,
device as *mut c_void,
&mut shader_resource_view as *mut _ as *mut *mut c_void,
);
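// The compositor error code returned by the call is ignored;
// failure is detected via the null check below.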
if shader_resource_view.is_null() {
return Err("Failed to get mirror texture".into());
}
// Get the underlying texture resource using vtable
let view_vtbl = (*shader_resource_view).lpVtbl as *mut ID3D11ShaderResourceViewVtbl;
let mut resource: *mut ID3D11Resource = ptr::null_mut();
((*view_vtbl).parent.GetResource)(
shader_resource_view as *mut ID3D11View,
&mut resource as *mut _ as *mut *mut ID3D11Resource,
);
if resource.is_null() {
return Err("Failed to get resource".into());
}
// Query for ID3D11Texture2D interface
let mut texture2d: *mut ID3D11Texture2D = ptr::null_mut();
let hr = (*(resource as *mut IUnknown)).QueryInterface(
&ID3D11Texture2D::uuidof(),
&mut texture2d as *mut _ as *mut *mut c_void,
);
if !SUCCEEDED(hr) {
return Err(format!("Failed to query ID3D11Texture2D: {:x}", hr).into());
}
// Get texture description using vtable
let mut desc = MaybeUninit::<D3D11_TEXTURE2D_DESC>::uninit();
(*texture2d).GetDesc(desc.as_mut_ptr());
let desc = desc.assume_init();
println!("Texture dimensions: {}x{}", desc.Width, desc.Height);
// Create staging texture for CPU read access
let staging_desc = D3D11_TEXTURE2D_DESC {
Width: desc.Width,
Height: desc.Height,
Format: desc.Format,
Usage: D3D11_USAGE_STAGING,
SampleDesc: DXGI_SAMPLE_DESC {
Count: 1,
Quality: 0,
},
CPUAccessFlags: D3D11_CPU_ACCESS_READ,
ArraySize: 1,
BindFlags: 0,
MiscFlags: 0, // No shared/misc flags for the staging texture
MipLevels: 1,
};
let mut staging_texture: *mut ID3D11Texture2D = ptr::null_mut();
let hr = ((*(*device).lpVtbl).CreateTexture2D)(
device,
&staging_desc,
ptr::null(),
&mut staging_texture,
);
if !SUCCEEDED(hr) {
return Err(format!("Failed to create staging texture: {:x}", hr).into());
}
// Get device context using vtable
let mut context: *mut ID3D11DeviceContext = ptr::null_mut();
(*device).GetImmediateContext(&mut context);
// Copy texture to staging texture using vtable
((*(*context).lpVtbl).CopyResource)(
context,
staging_texture as *mut ID3D11Resource,
resource,
);
// Map the staging texture for reading using vtable
let mut mapped = MaybeUninit::<D3D11_MAPPED_SUBRESOURCE>::uninit();
let hr = ((*(*context).lpVtbl).Map)(
context,
staging_texture as *mut ID3D11Resource,
0,
D3D11_MAP_READ,
0,
mapped.as_mut_ptr(),
);
if !SUCCEEDED(hr) {
return Err(format!("Failed to map staging texture: {:x}", hr).into());
}
let mapped = mapped.assume_init();
// Create image buffer with proper size validation
if desc.Width == 0 || desc.Height == 0 || desc.Width > 16384 || desc.Height > 16384 {
return Err(
format!("Invalid texture dimensions: {}x{}", desc.Width, desc.Height).into(),
);
}
println!("Processing texture data...");
// Calculate buffer size (3 bytes per pixel for RGB)
let buffer_size = (desc.Width * desc.Height * 3) as usize;
let mut pixel_data = vec![0u8; buffer_size];
// Process pixel data safely with proper endian handling
if !mapped.pData.is_null() {
for y in 0..desc.Height {
let row_start = mapped.pData.add((mapped.RowPitch * y) as usize) as *const u32;
for x in 0..desc.Width {
// Read pixel as 32-bit value
let pixel = *row_start.add(x as usize);
// Apply endian flip to get correct RGB values
let rgb = rgb_endian_flip(pixel);
// Extract RGB components
let r = ((rgb >> 16) & 0xFF) as u8;
let g = ((rgb >> 8) & 0xFF) as u8;
let b = (rgb & 0xFF) as u8;
// Calculate destination buffer index
let dst_idx = ((y * desc.Width + x) * 3) as usize;
// Bounds check for destination buffer
if dst_idx + 2 < buffer_size {
pixel_data[dst_idx] = r;
pixel_data[dst_idx + 1] = g;
pixel_data[dst_idx + 2] = b;
}
}
}
}
// Create image from processed pixel data
let imgbuf =
ImageBuffer::<image::Rgb<u8>, Vec<u8>>::from_raw(desc.Width, desc.Height, pixel_data)
.ok_or("Failed to create image buffer from pixel data")?;
// Unmap the resource using vtable
((*(*context).lpVtbl).Unmap)(context, staging_texture as *mut ID3D11Resource, 0);
// Release mirror texture
if let Some(release_fn) = vr_compositor_fn_table.ReleaseMirrorTextureD3D11 {
release_fn(shader_resource_view as *mut c_void);
}
println!("Saving image...");
imgbuf
.save("./openvr_capture.png")
.map_err(|e| format!("Failed to save image: {}", e))?;
println!("Successfully saved openvr_capture.png");
}
// OpenVR will be shut down when context goes out of scope
Ok(())
}
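
// Create a D3D11 device with the given driver type and creation flags,
// returning the raw HRESULT on failure.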
fn create_d3d_device_with_type(
driver_type: D3D_DRIVER_TYPE,
flags: D3D11_CREATE_DEVICE_FLAG,
) -> Result<*mut ID3D11Device, HRESULT> {
unsafe {
let mut device = ptr::null_mut();
let hr = D3D11CreateDevice(
ptr::null_mut(),
driver_type,
ptr::null_mut(),
flags,
ptr::null(),
0,
D3D11_SDK_VERSION,
&mut device,
ptr::null_mut(),
ptr::null_mut(),
);
if SUCCEEDED(hr) {
Ok(device)
} else {
Err(hr)
}
}
}
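
// Create a D3D11 device, preferring a hardware device and falling back to WARP
// when hardware acceleration is unsupported.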
fn create_d3d_device() -> Result<*mut ID3D11Device, Box<dyn std::error::Error>> {
// Try hardware device first
match create_d3d_device_with_type(D3D_DRIVER_TYPE_HARDWARE, D3D11_CREATE_DEVICE_BGRA_SUPPORT) {
Ok(device) => Ok(device),
Err(hr) if hr == DXGI_ERROR_UNSUPPORTED => {
println!("Hardware device not supported, trying WARP...");
// Fallback to WARP device
create_d3d_device_with_type(D3D_DRIVER_TYPE_WARP, D3D11_CREATE_DEVICE_BGRA_SUPPORT)
.map_err(|hr| format!("Failed to create WARP device: {:x}", hr).into())
}
Err(hr) => Err(format!("Failed to create D3D11 device: {:x}", hr).into()),
}
}