diff --git a/examples/window-rgb.rs b/examples/window-rgb.rs
index ece4268..71953c2 100644
--- a/examples/window-rgb.rs
+++ b/examples/window-rgb.rs
@@ -2,7 +2,7 @@ use std::num::NonZeroU32;
 use winit::{
     dpi::PhysicalSize,
-    event::{Event, WindowEvent},
+    event::{DeviceEvent, ElementState, Event, WindowEvent},
     event_loop::{ControlFlow, EventLoop},
     window::WindowBuilder,
 };
 
@@ -15,7 +15,7 @@ fn main() {
     let context = unsafe { softbuffer::Context::new(&window) }.unwrap();
     let mut surface = unsafe { softbuffer::Surface::new(&context, &window) }.unwrap();
 
-    let camera = Camera::new_default_device();
+    let mut camera = Camera::new_default_device();
     camera.start();
 
     event_loop.run(move |event, _x, control_flow| {
@@ -23,9 +23,7 @@ fn main() {
 
         match event {
             Event::RedrawRequested(window_id) if window_id == window.id() => {
-                let Some(frame) = camera.wait_for_frame() else {
-                    return
-                };
+                let Some(frame) = camera.wait_for_frame() else { return };
 
                 let (w, h) = frame.size_u32();
                 surface.resize(NonZeroU32::new(w).unwrap(), NonZeroU32::new(h).unwrap()).unwrap();
@@ -47,6 +45,12 @@ fn main() {
             Event::RedrawEventsCleared => {
                 window.request_redraw();
             }
+            Event::DeviceEvent {
+                event: DeviceEvent::Button { button: _, state: ElementState::Released },
+                device_id: _,
+            } => {
+                camera.change_device();
+            }
             _ => {}
         }
     });
diff --git a/src/camera.rs b/src/camera.rs
index 0527ecc..09e56e5 100644
--- a/src/camera.rs
+++ b/src/camera.rs
@@ -37,6 +37,10 @@ impl Camera {
     pub fn wait_for_frame(&self) -> Option<Frame> {
         self.inner.wait_for_frame().map(|inner| Frame { inner })
     }
+
+    pub fn change_device(&mut self) {
+        self.inner.change_device();
+    }
 }
 
 impl Frame {
@@ -66,4 +70,5 @@ pub(crate) trait InnerCamera: std::fmt::Debug {
     fn start(&self);
     fn stop(&self);
     fn wait_for_frame(&self) -> Option<Self::Frame>;
+    fn change_device(&mut self);
 }
diff --git a/src/linux_v4l2/mod.rs b/src/linux_v4l2/mod.rs
index f7d82b7..2e9d16c 100644
--- a/src/linux_v4l2/mod.rs
+++ b/src/linux_v4l2/mod.rs
@@ -1,11 +1,13 @@
 use ffimage::color::Bgra;
+use v4l::context::Node;
 use v4l::io::traits::CaptureStream;
 use v4l::video::Capture;
 use v4l::*;
 
 use std::marker::PhantomData;
+
 use std::sync::RwLock;
 
 use crate::InnerCamera;
@@ -16,52 +18,78 @@ pub struct Camera {
     stream: RwLock<Option<MmapStream<'static>>>,
 }
 
-impl InnerCamera for Camera {
-    type Frame = Frame;
-
-    fn new_default_device() -> Self {
-        let device_node = v4l::context::enum_devices().into_iter().next().unwrap();
-        let device_name =
-            device_node.name().unwrap_or_else(|| device_node.path().to_string_lossy().to_string());
+fn name_or_path(device_node: &v4l::context::Node) -> String {
+    device_node.name().unwrap_or_else(|| device_node.path().to_string_lossy().to_string())
+}
 
-        println!(
-            "Node {{ index: {}, name: {:?}, path: {:?} }}",
-            device_node.index(),
-            device_node.name(),
-            device_node.path()
-        );
+fn get_next_best_format(device: &Device) -> Format {
+    let _rgb = FourCC::new(b"RGB3");
+    let mut fmt = device.format().expect("device.format()");
+    let size = device
+        .enum_framesizes(fmt.fourcc)
+        .unwrap()
+        .into_iter()
+        .next()
+        .unwrap()
+        .size
+        .to_discrete()
+        .into_iter()
+        .last()
+        .unwrap();
+    fmt.width = size.width;
+    fmt.height = size.height;
+    fmt
+}
 
-        let device = v4l::Device::new(0).unwrap();
+#[allow(unused)]
+fn display_node(node: &Node) {
+    println!(
+        "Node {{ index: {}, name: {:?}, path: {:?} }}",
+        node.index(),
+        node.name(),
+        node.path()
+    );
+}
 
-        for fmt in device.enum_formats().unwrap() {
-            println!("{:?}", fmt);
+#[allow(unused)]
+fn display_device_formats(device: &Device) {
+    println!("Device formats:");
+    for fmt in device.enum_formats().unwrap() {
+        println!(" {:?}", fmt);
 
-            for size in device.enum_framesizes(fmt.fourcc).unwrap() {
-                println!("{:?}", size);
-            }
+        for size in device.enum_framesizes(fmt.fourcc).unwrap() {
+            println!(" {:?}", size);
         }
+    }
+}
+
+fn enum_devices() -> Vec<Node> {
+    v4l::context::enum_devices()
+        .into_iter()
+        .filter_map(|node| Device::with_path(node.path()).ok().map(|device| (node, device)))
+        .filter(|(_, device)| device.format().is_ok())
+        .map(|(node, _)| node)
+        .collect()
+}
 
-        let _rgb = FourCC::new(b"RGB3");
-        let mut fmt = device.format().unwrap();
-        let size = device
-            .enum_framesizes(fmt.fourcc)
-            .unwrap()
-            .into_iter()
-            .next()
-            .unwrap()
-            .size
-            .to_discrete()
-            .into_iter()
-            .last()
-            .unwrap();
-        fmt.width = size.width;
-        fmt.height = size.height;
-
-        if let Err(error) = device.set_format(&fmt) {
-            eprintln!("Device.set_format: {}", error);
+impl Camera {
+    fn from_node(node: &v4l::context::Node) -> Self {
+        let device = v4l::Device::with_path(node.path()).unwrap();
+        device.set_format(&get_next_best_format(&device)).unwrap();
+        Self {
+            device: RwLock::new(device),
+            device_name: name_or_path(node),
+            stream: RwLock::new(None),
         }
+    }
+}
+
+impl InnerCamera for Camera {
+    type Frame = Frame;
 
-        Self { device: RwLock::new(device), device_name, stream: RwLock::new(None) }
+    fn new_default_device() -> Self {
+        let node = enum_devices().into_iter().next().unwrap();
+        Self::from_node(&node)
     }
 
     fn start(&self) {
@@ -94,6 +122,22 @@ impl InnerCamera for Camera {
             None
         }
     }
+
+    fn change_device(&mut self) {
+        let devices = enum_devices();
+        if let Some(pos) = devices.iter().position(|n| name_or_path(n) == self.device_name) {
+            let new_pos = (pos + 1) % devices.len();
+            if new_pos != pos {
+                *self = Self::from_node(&devices[new_pos]);
+                self.start();
+            }
+        } else if !devices.is_empty() {
+            *self = Self::from_node(&devices[0]);
+            self.start();
+        } else {
+            self.stop();
+        }
+    }
 }
 
 impl std::fmt::Debug for Camera {
diff --git a/src/mac_avf/av_capture_session.rs b/src/mac_avf/av_capture_session.rs
index 9812477..3c8f8d8 100644
--- a/src/mac_avf/av_capture_session.rs
+++ b/src/mac_avf/av_capture_session.rs
@@ -42,6 +42,10 @@ impl AVCaptureSession {
     pub fn add_output(&self, output: &AVCaptureVideoDataOutput) {
         unsafe { msg_send!(self, addOutput: output) }
     }
+
+    pub fn remove_input(&self, input: &AVCaptureDeviceInput) {
+        unsafe { msg_send!(self, removeInput: input) }
+    }
 }
 
 #[test]
diff --git a/src/mac_avf/camera.rs b/src/mac_avf/camera.rs
index 976ad83..edad790 100644
--- a/src/mac_avf/camera.rs
+++ b/src/mac_avf/camera.rs
@@ -4,9 +4,10 @@ use std::sync::Arc;
 
 #[derive(Debug)]
 pub struct Camera {
-    _device: Id<AVCaptureDevice>,
-    _input: Id<AVCaptureDeviceInput>,
-    _output: Id<AVCaptureVideoDataOutput>,
+    device: Id<AVCaptureDevice>,
+    input: Id<AVCaptureDeviceInput>,
+    #[allow(unused)]
+    output: Id<AVCaptureVideoDataOutput>,
     session: Id<AVCaptureSession>,
     slot: Arc,
 }
@@ -33,7 +34,7 @@ impl Camera {
         session.add_input(&input);
         session.add_output(&output);
 
-        Camera { _device: device, _input: input, _output: output, session, slot }
+        Camera { device, input, output, session, slot }
     }
 
     pub fn start(&self) {
@@ -47,6 +48,24 @@ impl Camera {
     pub fn wait_for_frame(&self) -> Option<Frame> {
         self.slot.wait_for_sample().map(|sample| Frame { sample })
     }
+
+    pub fn change_device(&mut self) {
+        let devices = AVCaptureDevice::all_video_devices();
+        let Some(index) = devices.iter().position(|d| d.unique_id() == self.device.unique_id())
+        else {
+            return;
+        };
+        let new_index = (index + 1) % devices.len();
+        if new_index == index {
+            return;
+        }
+        let new_device = devices[new_index].retain();
+        let new_input = AVCaptureDeviceInput::from_device(&new_device).unwrap();
+        self.session.remove_input(&self.input);
+        self.device = new_device;
+        self.input = new_input;
+        self.session.add_input(&self.input);
+    }
 }
 
 impl Frame {
@@ -69,3 +88,24 @@ impl<'a> FrameData<'a> {
         self.pixels.u32
     }
 }
+
+#[cfg(test)]
+const TEST_FRAMES: usize = 3;
+
+#[test]
+fn change_device() {
+    let mut camera = Camera::new_default_device();
+    camera.start();
+
+    std::iter::from_fn(|| camera.wait_for_frame())
+        .map(|s| println!("{s:?}"))
+        .take(TEST_FRAMES)
+        .count();
+
+    camera.change_device();
+
+    std::iter::from_fn(|| camera.wait_for_frame())
+        .map(|s| println!("{s:?}"))
+        .take(TEST_FRAMES)
+        .count();
+}
diff --git a/src/mac_avf/test_scenarios.rs b/src/mac_avf/test_scenarios.rs
index 280a3cf..8ca47ff 100644
--- a/src/mac_avf/test_scenarios.rs
+++ b/src/mac_avf/test_scenarios.rs
@@ -1,3 +1,5 @@
+use objc2::rc::Id;
+
 use super::*;
 
 const TEST_FRAMES: usize = 3;
@@ -121,3 +123,37 @@ fn running_capture_session_for_all_cameras_in_yuv2() {
         session.stop_running();
     }
 }
+
+#[test]
+fn running_capture_session_from_changing_cameras() {
+    println!();
+    let session = AVCaptureSession::new();
+    let output = AVCaptureVideoDataOutput::new();
+    let delegate = SampleBufferDelegate::new();
+    let slot = delegate.slot();
+    output.set_sample_buffer_delegate(delegate);
+    session.add_output(&output);
+
+    let mut input: Option<Id<AVCaptureDeviceInput>> = None;
+
+    session.start_running();
+
+    for device in AVCaptureDevice::all_video_devices() {
+        println!("{}", device.localized_name());
+
+        if let Some(input) = input {
+            session.remove_input(&input);
+        }
+
+        let new_input = AVCaptureDeviceInput::from_device(&device).unwrap();
+        session.add_input(&new_input);
+        input = Some(new_input);
+
+        std::iter::from_fn(|| slot.wait_for_sample())
+            .map(|s| println!("{s:?}"))
+            .take(TEST_FRAMES)
+            .count();
+    }
+
+    session.stop_running();
+}
diff --git a/src/win_mf/camera.rs b/src/win_mf/camera.rs
index b80688e..38a3003 100644
--- a/src/win_mf/camera.rs
+++ b/src/win_mf/camera.rs
@@ -1,6 +1,6 @@
 use super::mf::*;
 
-use std::sync::mpsc::*;
+use std::{sync::mpsc::*, time::Duration};
 
 use windows::Win32::Media::MediaFoundation::*;
 
@@ -8,8 +8,7 @@ use windows::Win32::Media::MediaFoundation::*;
 #[derive(Debug)]
 pub struct Camera {
     engine: IMFCaptureEngine,
-    device: IMFActivate,
-    media_source: IMFMediaSource,
+    device: Device,
     event_rx: Receiver<CaptureEngineEvent>,
     sample_rx: Receiver<Option<IMFSample>>,
     event_cb: IMFCaptureEngineOnEventCallback,
@@ -36,14 +35,12 @@ impl Camera {
         let event_cb = CaptureEventCallback { event_tx }.into();
         let sample_cb = CaptureSampleCallback { sample_tx }.into();
 
-        let devices = enum_device_sources();
+        let devices = Device::enum_devices();
         let Some(device) = devices.first().cloned() else { todo!() };
-        let media_source = activate_to_media_source(&device);
 
-        init_capture_engine(&engine, Some(&media_source), &event_cb).unwrap();
+        init_capture_engine(&engine, Some(&device.source), &event_cb).unwrap();
 
-        let camera =
-            Camera { engine, device, media_source, event_rx, sample_rx, event_cb, sample_cb };
+        let camera = Camera { engine, device, event_rx, sample_rx, event_cb, sample_cb };
         camera.wait_for_event(CaptureEngineEvent::Initialized);
         camera.prepare_source_sink();
         camera
@@ -59,7 +56,8 @@ impl Camera {
 
     pub fn wait_for_frame(&self) -> Option<Frame> {
         self.sample_rx
-            .recv()
+            // TODO sometimes running two engines on the same camera breaks frame delivery, so don't wait too long
+            .recv_timeout(Duration::from_secs(3))
             .ok()
             .flatten()
            .and_then(|sample| {
@@ -72,6 +70,30 @@ impl Camera {
             })
             .map(|buffer: LockedBuffer| Frame { buffer })
     }
+
+    pub fn change_device(&mut self) {
+        let devices: Vec<Device> = enum_device_sources().into_iter().map(Device::new).collect();
+        let Some(index) = devices.iter().position(|d| d.id() == self.device.id()) else { return };
+        let new_index = (index + 1) % devices.len();
+
+        if new_index == index {
+            return;
+        }
+        let new_device = devices[new_index].clone();
+
+        let engine = new_capture_engine().unwrap();
+        let (event_tx, event_rx) = channel::<CaptureEngineEvent>();
+        let (sample_tx, sample_rx) = channel::<Option<IMFSample>>();
+        let event_cb = CaptureEventCallback { event_tx }.into();
+        let sample_cb = CaptureSampleCallback { sample_tx }.into();
+
+        init_capture_engine(&engine, Some(&new_device.source), &event_cb).unwrap();
+
+        *self = Camera { engine, device: new_device, event_rx, sample_rx, event_cb, sample_cb };
+        self.wait_for_event(CaptureEngineEvent::Initialized);
+        self.prepare_source_sink();
+        self.start(); // TODO watch out for playing state
+    }
 }
 
 impl Camera {
diff --git a/src/win_mf/mf.rs b/src/win_mf/mf.rs
index 8195234..70327b8 100644
--- a/src/win_mf/mf.rs
+++ b/src/win_mf/mf.rs
@@ -10,12 +10,12 @@ use super::media_type::MediaType;
 
 #[derive(Clone, Debug)]
 pub struct Device {
-    activate: IMFActivate,
-    source: IMFMediaSource,
+    pub activate: IMFActivate,
+    pub source: IMFMediaSource,
 }
 
 impl Device {
-    fn new(activate: IMFActivate) -> Self {
+    pub(crate) fn new(activate: IMFActivate) -> Self {
         co_initialize_multithreaded();
         let source = unsafe { activate.ActivateObject().unwrap() };
         Self { activate, source }
@@ -31,7 +31,12 @@ impl PartialEq for Device {
 
 impl Drop for Device {
     fn drop(&mut self) {
         // unsafe { self.activate.ShutdownObject().unwrap() };
-        println!("Device.drop done");
+    }
+}
+
+impl std::fmt::Display for Device {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_fmt(format_args!("Device({})", self.name()))
     }
 }
@@ -421,7 +426,3 @@ pub fn co_initialize_multithreaded() {
 pub fn co_mta_usage() {
     let _ = unsafe { CoIncrementMTAUsage() };
 }
-
-pub(crate) fn activate_to_media_source(activate: &IMFActivate) -> IMFMediaSource {
-    unsafe { activate.ActivateObject().unwrap() }
-}
diff --git a/tests/camera.rs b/tests/camera.rs
index b4077aa..2946031 100644
--- a/tests/camera.rs
+++ b/tests/camera.rs
@@ -91,3 +91,16 @@ fn two_cameras_start_and_wait_for_frames() {
     println!("Camera 1 {:?}", camera1.wait_for_frame());
     println!("Camera 2 {:?}", camera2.wait_for_frame());
 }
+
+#[test]
+fn change_device() {
+    let mut camera = Camera::new_default_device();
+    camera.start();
+    assert!(camera.wait_for_frame().is_some());
+    assert!(camera.wait_for_frame().is_some());
+    assert!(camera.wait_for_frame().is_some());
+    camera.change_device();
+    assert!(camera.wait_for_frame().is_some());
+    assert!(camera.wait_for_frame().is_some());
+    assert!(camera.wait_for_frame().is_some());
+}
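
For reference, a minimal usage sketch (not part of the patch) of how an application can cycle capture devices with the new API. It only relies on calls that appear in the diff above (Camera::new_default_device, start, wait_for_frame, Frame::size_u32, change_device); the function name is illustrative.

fn cycle_devices() {
    let mut camera = Camera::new_default_device();
    camera.start();

    // Pull a few frames from the current device.
    for _ in 0..3 {
        if let Some(frame) = camera.wait_for_frame() {
            let (w, h) = frame.size_u32();
            println!("got frame {w}x{h}");
        }
    }

    // Switch to the next enumerated device and keep capturing from it.
    camera.change_device();
    if let Some(frame) = camera.wait_for_frame() {
        println!("next device frame: {:?}", frame.size_u32());
    }
}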