feat: capture YUV422 video stream

This commit is contained in:
sup39 2024-09-04 03:31:17 +08:00
commit caa44c9f29
Signed by: sup39
GPG key ID: 111C00916C1641E5
23 changed files with 5505 additions and 0 deletions

1
.gitignore vendored Normal file
View file

@@ -0,0 +1 @@
/target

4451
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

18
Cargo.toml Normal file
View file

@@ -0,0 +1,18 @@
[workspace]
members = ["crates/*"]
resolver = "2"
[workspace.package]
# Settings inherited by member crates via `<key>.workspace = true`.
edition = "2021"
[workspace.dependencies]
# Dependency versions are centralized here; member crates opt in with
# `<name>.workspace = true`.
eframe = { version = "0.28.1", features = ["wgpu", "persistence"] } # GUI (egui) with state persistence
env_logger = { version = "0.11.5", features = ["auto-color", "humantime"] }
# Capture backend; pinned to an exact commit for reproducible builds.
eye-hal = { git = "https://github.com/raymanfx/eye-rs", rev = "5b7e3f7a1e79966091692896c568aab042e449ef", default-features = false }
ndarray = { version = "0.16.1", default-features = false, features = ["rayon"] }
log = { version = "0.4.22", default-features = false }
rayon = { version = "1.10.0", default-features = false } # parallel frame decoding
serde = { version = "1.0.209", default-features = false }
thiserror = { version = "1.0.63", default-features = false }
png = { version = "0.17.13", default-features = false }
ron = { version = "0.8.1", default-features = false } # config (de)serialization format

View file

@ -0,0 +1,16 @@
[package]
name = "sup-auto-split"
version = "0.1.0"
# Edition is inherited from the workspace manifest.
edition.workspace = true
[dependencies]
# All versions are managed centrally in [workspace.dependencies].
eframe.workspace = true
env_logger.workspace = true
eye-hal.workspace = true
log.workspace = true
ndarray.workspace = true
png.workspace = true
rayon.workspace = true
ron.workspace = true
# `derive` feature enabled locally on top of the workspace definition.
serde = { workspace = true, features = ["derive"] }
thiserror.workspace = true

View file

@ -0,0 +1,55 @@
use eframe::egui;
/// Top-level application: owns the shared state and the UI widgets.
pub struct App {
    state: crate::AppState,
    // ui
    menu_bar: crate::ui::MenuBar,
    config_window: crate::ui::ConfigWindow,
}

impl App {
    /// Builds the application, restoring any persisted state from
    /// `cc.storage`.
    pub fn new(cc: &eframe::CreationContext) -> Self {
        App {
            state: crate::AppState::load(&cc.egui_ctx, cc.storage),
            menu_bar: crate::ui::MenuBar::new(),
            config_window: crate::ui::ConfigWindow::new(),
        }
    }
}
impl eframe::App for App {
    /// Per-frame UI: menu bar, optional config window, and the captured
    /// frame with an average decode-latency readout.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        egui::TopBottomPanel::top("menu-bar").show(ctx, |ui| {
            self.menu_bar.show(ui, &mut self.state);
        });
        if self.state.menu.is_config_opened {
            self.config_window.show(ctx, &mut self.state);
        }
        egui::CentralPanel::default().show(ctx, |ui| {
            ui.image(self.state.capture.texture());
            // Average all recorded decode-latency samples (milliseconds).
            // The former `dt_recent` name was misleading: the history
            // iterator yields samples oldest-first, so the first item is
            // the oldest, not the most recent.
            let (dt_count, dt_sum) = self
                .state
                .capture
                .decode_latency_history()
                .fold((0usize, 0.0f32), |(count, sum), dt| (count + 1, sum + dt));
            // Only display once more than one sample has been collected,
            // matching the previous behavior.
            if dt_count > 1 {
                ui.label(format!(
                    "Average decode latency: {:.2} ms (recent {} frames)",
                    dt_sum / dt_count as f32,
                    dt_count,
                ));
            }
        });
    }

    /// Persists the application state via eframe's storage.
    fn save(&mut self, storage: &mut dyn eframe::Storage) {
        self.state.save(storage);
    }
}

View file

@ -0,0 +1,38 @@
use super::{Device, DeviceDescription, Result};
/// Capture backend context: wraps the eye-hal platform context and caches
/// the most recently enumerated device list.
pub struct Context {
    ctx: eye_hal::PlatformContext<'static>,
    devices: Vec<DeviceDescription>,
}

impl Context {
    /// Enumerates capture devices, logging and swallowing errors.
    ///
    /// Returns `None` on failure so callers can keep their previous list.
    /// Extracted because `new` and `refresh` previously duplicated this
    /// exact expression.
    fn list_devices(ctx: &eye_hal::PlatformContext<'static>) -> Option<Vec<DeviceDescription>> {
        eye_hal::traits::Context::devices(ctx)
            .map(|devices| devices.into_iter().map(DeviceDescription::from).collect())
            .inspect_err(|err| log::error!("Failed to list capture devices: {}", err))
            .ok()
    }

    /// Creates a context and performs an initial device enumeration.
    /// An enumeration failure is logged and yields an empty device list.
    pub fn new() -> Self {
        let ctx = eye_hal::PlatformContext::default();
        let devices = Self::list_devices(&ctx).unwrap_or_default();
        Self { ctx, devices }
    }

    /// Cached device list from the last (re-)enumeration.
    pub fn devices(&self) -> &[DeviceDescription] {
        &self.devices
    }

    /// Re-enumerates devices; on failure the previous list is kept.
    pub fn refresh(&mut self) {
        if let Some(devices) = Self::list_devices(&self.ctx) {
            self.devices = devices;
        }
    }

    /// Opens the device identified by `uri`.
    pub fn open_device(&self, uri: &str) -> Result<Device> {
        eye_hal::traits::Context::open_device(&self.ctx, uri).and_then(Device::try_from)
    }
}

impl Default for Context {
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,54 @@
use eframe::egui::{Color32, ColorImage};
use rayon::prelude::*;
/// Decodes a raw frame into a [`ColorImage`].
///
/// Takes the raw frame bytes and the frame `size` (`[width, height]` in
/// pixels) and returns the decoded image, or `None` if decoding fails.
pub type FrameDecoder = fn(raw_frame: &[u8], size: [usize; 2]) -> Option<ColorImage>;
/// Decodes a YUV422 encoded frame.
///
/// `raw_frame` is expected to be packed YUYV: each 4-byte group
/// `[Y0, U, Y1, V]` encodes two horizontally adjacent pixels sharing
/// chroma. Returns `None` when the buffer does not match
/// `size[0] * size[1]` pixels exactly.
pub fn decode_yuv422(raw_frame: &[u8], size: [usize; 2]) -> Option<ColorImage> {
    let pixel_count = size[0] * size[1];
    let mut pixels: Vec<Color32> = Vec::with_capacity(pixel_count);
    // Decode in parallel with rayon: 4 source bytes -> 2 output pixels.
    let src_iter = raw_frame.par_chunks_exact(4);
    let mut dst_iter = pixels.spare_capacity_mut().par_chunks_exact_mut(2);
    // invalid pixel count: reject any size/buffer mismatch up front so the
    // `set_len` below is sound (every destination slot gets written).
    if src_iter.len() != dst_iter.len()
        || !src_iter.remainder().is_empty()
        || !dst_iter.remainder().is_empty()
    {
        return None;
    }
    dst_iter.zip_eq(src_iter).for_each(|(dst, src)| {
        // Center chroma around 0; luma is used as-is.
        // NOTE(review): this assumes full-range (0..255) samples, not
        // limited-range 16..235 — confirm against the capture source.
        let (y0, u, y1, v) = (
            src[0] as f32,
            (src[1] as i16 - 128) as f32,
            src[2] as f32,
            (src[3] as i16 - 128) as f32,
        );
        /// ITU-R BT.709 conversion
        /// y: [0, 1], u: [-0.5, 0.5], v: [-0.5, 0.5]
        #[inline]
        fn yuv_to_color32(y: f32, u: f32, v: f32) -> Color32 {
            let r = y + 1.5748 * v;
            let g = y - 0.1873 * u - 0.4681 * v;
            let b = y + 1.8556 * u;
            Color32::from_rgb(
                r.clamp(0.0, 255.0) as u8,
                g.clamp(0.0, 255.0) as u8,
                b.clamp(0.0, 255.0) as u8,
            )
        }
        dst[0].write(yuv_to_color32(y0, u, v));
        dst[1].write(yuv_to_color32(y1, u, v));
    });
    // SAFETY: the length/remainder checks above guarantee the chunk
    // iterators covered all `pixel_count` spare slots, and the closure
    // wrote both slots of every 2-element chunk, so the first
    // `pixel_count` elements are fully initialized.
    unsafe {
        pixels.set_len(pixel_count);
    }
    Some(ColorImage { size, pixels })
}

View file

@ -0,0 +1,61 @@
use super::{
Result, Stream, StreamDescriptor, SupportedStreamDescriptor, UnsupportedStreamDescriptor,
};
use serde::{Deserialize, Serialize};
/// User-visible description of a capture device.
///
/// `Debug` and `Clone` derives added: a public data type of plain `String`
/// fields should be debuggable and cloneable (backward-compatible).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceDescription {
    /// Backend-specific URI used to (re)open the device.
    pub uri: String,
    /// Human-readable product name.
    pub product: String,
}

impl From<eye_hal::device::Description> for DeviceDescription {
    fn from(value: eye_hal::device::Description) -> Self {
        Self {
            uri: value.uri,
            product: value.product,
        }
    }
}

impl std::fmt::Display for DeviceDescription {
    /// Formats as `"<uri> (<product>)"` — the label shown in the device picker.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{} ({})", self.uri, self.product))
    }
}
/// An opened capture device with its streams pre-sorted into supported
/// (decodable pixel format) and unsupported descriptors.
pub struct Device {
    device: eye_hal::platform::Device<'static>,
    streams: Vec<SupportedStreamDescriptor>,
    unsupported_streams: Vec<UnsupportedStreamDescriptor>,
}
impl Device {
    /// Streams whose pixel format this application can decode.
    pub fn streams(&self) -> &[SupportedStreamDescriptor] {
        &self.streams
    }
    /// Streams advertised by the device that cannot be decoded.
    pub fn unsupported_streams(&self) -> &[UnsupportedStreamDescriptor] {
        &self.unsupported_streams
    }
    /// Starts capturing with the given stream configuration.
    pub fn start_stream(&self, descriptor: &SupportedStreamDescriptor) -> Result<Stream> {
        eye_hal::traits::Device::start_stream(&self.device, &descriptor.into())
            .map(|stream| Stream::new(stream, descriptor))
    }
}
impl TryFrom<eye_hal::platform::Device<'static>> for Device {
    type Error = eye_hal::Error;

    /// Wraps a platform device, partitioning its advertised streams by
    /// whether their pixel format is supported.
    fn try_from(
        device: eye_hal::platform::Device<'static>,
    ) -> std::result::Result<Self, Self::Error> {
        let mut streams = Vec::new();
        let mut unsupported_streams = Vec::new();
        eye_hal::traits::Device::streams(&device)?
            .into_iter()
            .map(StreamDescriptor::from)
            .for_each(|descriptor| match descriptor {
                StreamDescriptor::Supported(supported) => streams.push(supported),
                StreamDescriptor::Unsupported(unsupported) => {
                    unsupported_streams.push(unsupported)
                }
            });
        Ok(Self {
            device,
            streams,
            unsupported_streams,
        })
    }
}

View file

@ -0,0 +1,15 @@
//! Video-capture abstraction over `eye-hal`: device enumeration, stream
//! selection, and frame decoding.
mod context;
mod decoder;
mod device;
mod pixel_format;
mod stream;
pub use context::Context;
pub use decoder::FrameDecoder;
pub use device::{Device, DeviceDescription};
pub use pixel_format::{PixelFormat, UnsupportedPixelFormat};
pub use stream::{
    Stream, StreamDescriptor, SupportedStreamDescriptor, UnsupportedStreamDescriptor,
};
// Error/result aliases re-exported from the backend crate.
pub type Error = eye_hal::Error;
pub type Result<T> = eye_hal::Result<T>;

View file

@ -0,0 +1,51 @@
use super::decoder::{self, FrameDecoder};
use serde::{Deserialize, Serialize};
/// Pixel formats this application knows how to decode.
#[derive(PartialEq, Deserialize, Serialize)]
#[non_exhaustive]
pub enum PixelFormat {
    Yuv422,
}

impl PixelFormat {
    /// Returns the decode function matching this pixel format.
    pub fn decoder(&self) -> FrameDecoder {
        match *self {
            Self::Yuv422 => decoder::decode_yuv422,
        }
    }
}

impl std::fmt::Display for PixelFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match *self {
            Self::Yuv422 => "YUV422",
        };
        f.write_str(name)
    }
}
impl TryFrom<eye_hal::format::PixelFormat> for PixelFormat {
    type Error = UnsupportedPixelFormat;

    /// Maps the backend's FourCC-style custom format `"YUYV"` onto
    /// [`PixelFormat`]; anything else is reported as unsupported.
    fn try_from(value: eye_hal::format::PixelFormat) -> std::result::Result<Self, Self::Error> {
        if let eye_hal::format::PixelFormat::Custom(ref custom) = value {
            if custom.as_str() == "YUYV" {
                return Ok(Self::Yuv422);
            }
        }
        Err(UnsupportedPixelFormat(value))
    }
}

impl From<&PixelFormat> for eye_hal::format::PixelFormat {
    /// Converts back to the backend's custom-format representation.
    fn from(val: &PixelFormat) -> Self {
        match *val {
            PixelFormat::Yuv422 => Self::Custom("YUYV".into()),
        }
    }
}
/// A backend pixel format this application cannot decode; carries the
/// original value for display purposes.
pub struct UnsupportedPixelFormat(pub eye_hal::format::PixelFormat);

impl std::fmt::Display for UnsupportedPixelFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

View file

@ -0,0 +1,109 @@
use super::{FrameDecoder, PixelFormat, UnsupportedPixelFormat};
use serde::{Deserialize, Serialize};
/// A running capture stream plus everything needed to decode its frames.
pub struct Stream {
    // [width, height] of the negotiated stream, in pixels.
    size: [usize; 2],
    stream: eye_hal::platform::Stream<'static>,
    decoder: FrameDecoder,
}
/// A decoded frame together with the time taken just before decoding began.
pub struct DecodedFrame {
    pub image: eframe::egui::ColorImage,
    // NOTE(review): `SystemTime` is not monotonic, so latencies derived
    // from it can fail on clock adjustment (consumers already guard with
    // `duration_since(..).ok()`); `Instant` would be the safer choice,
    // but changing the field type would break the public interface.
    pub timestamp: std::time::SystemTime,
}
impl Stream {
    /// Wraps a started platform stream with its frame size and the decoder
    /// matching the descriptor's pixel format.
    pub fn new(
        stream: eye_hal::platform::Stream<'static>,
        descriptor: &SupportedStreamDescriptor,
    ) -> Self {
        Self {
            size: [descriptor.width as usize, descriptor.height as usize],
            stream,
            decoder: descriptor.pixfmt.decoder(),
        }
    }
    /// Blocks for the next decodable frame.
    ///
    /// Grab and decode failures are logged and skipped; `None` means the
    /// underlying stream is exhausted.
    pub fn next_frame(&mut self) -> Option<DecodedFrame> {
        while let Some(raw_frame_result) = eye_hal::traits::Stream::next(&mut self.stream) {
            match raw_frame_result {
                Err(err) => log::warn!("Failed to grab a frame from stream: {}", err),
                Ok(raw_frame) => {
                    // Timestamp before decoding so downstream latency
                    // measurements include decode time.
                    let timestamp = std::time::SystemTime::now();
                    match (self.decoder)(raw_frame, self.size) {
                        None => log::warn!("Failed to decode frame"),
                        Some(image) => return Some(DecodedFrame { image, timestamp }),
                    }
                }
            }
        }
        None
    }
}
/// Stream configuration (resolution, pixel format, frame rate), generic
/// over the pixel-format type so supported and unsupported descriptors
/// share one shape.
// The generic parameter is `F` rather than `PixelFormat`: the old name
// shadowed the concrete `PixelFormat` type imported in this module, which
// was confusing. Renaming a type parameter is backward compatible.
#[derive(Deserialize, Serialize)]
pub struct GenericStreamDescriptor<F> {
    pub width: u32,
    pub height: u32,
    pub pixfmt: F,
    /// Frames per second (derived from the backend's frame interval).
    pub fps: f32,
}
impl<F: std::fmt::Display> std::fmt::Display for GenericStreamDescriptor<F> {
    /// Formats as e.g. `1920x1080 @ 30.00 fps (YUV422)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}x{} @ {:.2} fps ({})",
            self.width, self.height, self.fps, self.pixfmt,
        )
    }
}
/// Descriptor for a stream whose pixel format can be decoded.
pub type SupportedStreamDescriptor = GenericStreamDescriptor<PixelFormat>;
/// Descriptor for a stream advertised by the device but not decodable.
pub type UnsupportedStreamDescriptor = GenericStreamDescriptor<UnsupportedPixelFormat>;
/// A backend stream descriptor classified by pixel-format support.
pub enum StreamDescriptor {
    Supported(SupportedStreamDescriptor),
    Unsupported(UnsupportedStreamDescriptor),
}
impl From<eye_hal::stream::Descriptor> for StreamDescriptor {
    /// Classifies a backend descriptor, converting its frame interval into
    /// frames per second.
    fn from(value: eye_hal::stream::Descriptor) -> Self {
        let (width, height) = (value.width, value.height);
        let fps = 1.0 / value.interval.as_secs_f32();
        PixelFormat::try_from(value.pixfmt)
            .map(|pixfmt| {
                Self::Supported(SupportedStreamDescriptor {
                    width,
                    height,
                    pixfmt,
                    fps,
                })
            })
            .unwrap_or_else(|pixfmt| {
                Self::Unsupported(UnsupportedStreamDescriptor {
                    width,
                    height,
                    pixfmt,
                    fps,
                })
            })
    }
}
impl From<&SupportedStreamDescriptor> for eye_hal::stream::Descriptor {
    /// Converts back into the backend's descriptor form, turning fps into
    /// a frame interval.
    fn from(val: &SupportedStreamDescriptor) -> Self {
        let interval = std::time::Duration::from_secs_f32(1.0 / val.fps);
        let pixfmt = eye_hal::format::PixelFormat::from(&val.pixfmt);
        Self {
            width: val.width,
            height: val.height,
            pixfmt,
            interval,
        }
    }
}
impl PartialEq for SupportedStreamDescriptor {
    /// Field-wise equality, including exact `f32` comparison of `fps`.
    ///
    /// NOTE(review): exact float equality appears intentional — it is used
    /// to re-match a descriptor deserialized from saved config against
    /// freshly enumerated ones. Confirm the serialized `fps` round-trips
    /// bit-exactly through ron.
    fn eq(&self, other: &Self) -> bool {
        self.width == other.width
            && self.height == other.height
            && self.pixfmt == other.pixfmt
            && self.fps == other.fps
    }
}

View file

@ -0,0 +1,8 @@
//! Crate root: wires together the app, capture, state, ui, and util modules.
mod app;
mod capture;
mod state;
mod ui;
mod util;
pub use app::App;
pub use state::AppState;

View file

@ -0,0 +1,14 @@
use sup_auto_split::App;
/// Entry point: initializes logging and runs the egui application.
fn main() -> eframe::Result {
    env_logger::init();
    // `NativeOptions { ..Default::default() }` set no fields (clippy
    // needless_update); call `default()` directly instead.
    let options = eframe::NativeOptions::default();
    eframe::run_native(
        "sup-auto-split",
        options,
        Box::new(|cc| Ok(Box::new(App::new(cc)))),
    )
}

View file

@ -0,0 +1,297 @@
use crate::{capture, util::AtomicU8Enum};
use eframe::{egui, Storage};
use serde::{Deserialize, Serialize};
use std::sync::{atomic, mpsc, Arc};
crate::derive_from_repr! {
    // Lifecycle of the capture worker thread. The `u8` repr plus the
    // macro-generated `From<u8>` let it live inside an `AtomicU8Enum`.
    #[repr(u8)]
    #[derive(Clone, Copy, Default)]
    pub enum CaptureStatus {
        /// No worker thread is running.
        #[default] Stopped = 0,
        /// The worker thread is grabbing and decoding frames.
        Capturing = 1,
        /// Stop requested; the worker exits after the current frame.
        Stopping = 2,
    }
}
/// Reasons `CaptureState::start_capture` can fail.
///
/// The `#[error]` strings are user-facing and shown verbatim in the UI/logs.
#[derive(thiserror::Error, Debug)]
pub enum StartCaptureError {
    #[error("Other stream has been opened. Please stop the opened stream first")]
    OtherStreamOpened,
    #[error("No device is selected")]
    NoDeviceSelected,
    #[error("No stream is selected")]
    NoStreamSelected,
    /// Should be unreachable: the UI only offers existing stream indices.
    #[error("[BUG] The selected stream does not exist")]
    StreamNotExists,
    #[error("Failed to open stream: {0}")]
    OpenStreamError(#[from] capture::Error),
}
/// Everything related to video capture: device/stream selection, the
/// worker-thread status, the shared output texture, and decode-latency
/// bookkeeping.
pub struct CaptureState {
    capture_ctx: capture::Context,
    // Index into `capture_ctx.devices()` of the currently opened device.
    device_index: Option<usize>,
    device: Option<capture::Device>,
    // Index into `device.streams()` of the selected stream.
    stream_index: Option<usize>,
    // Shared with the capture worker thread.
    status: Arc<AtomicU8Enum<CaptureStatus>>,
    is_auto_start_stream: bool,
    // texture
    egui_ctx: egui::Context,
    // Written by the worker thread with each decoded frame.
    texture_handle: egui::TextureHandle,
    // decode latency
    // Last 32 samples in milliseconds; NaN marks never-written slots.
    decode_latency_buffer: crate::util::CircularArrayBuffer<f32, 32>,
    // Worker thread sends samples; the UI thread drains them on demand.
    decode_latency_tx: mpsc::Sender<f32>,
    decode_latency_rx: mpsc::Receiver<f32>,
}
impl CaptureState {
    /// All currently known capture devices.
    pub fn devices(&self) -> &[capture::DeviceDescription] {
        self.capture_ctx.devices()
    }
    /// Index of the currently opened device in `devices()`, if any.
    pub fn device_index(&self) -> Option<usize> {
        self.device_index
    }
    /// Opens the device at `index`; re-selecting the open device is a
    /// no-op. A successful switch resets the stream selection.
    pub fn set_device_index(&mut self, index: usize) -> capture::Result<()> {
        // ignore request to open the same device
        if self.device_index == Some(index) {
            return Ok(());
        }
        // open the device
        if let Some(device) = self
            .capture_ctx
            .devices()
            .get(index)
            .map(|desc| self.capture_ctx.open_device(&desc.uri))
            .transpose()?
        {
            self.device = Some(device);
            self.device_index = Some(index);
            self.stream_index = None;
        }
        Ok(())
    }
    /// The currently opened device, if any.
    pub fn device(&self) -> Option<&capture::Device> {
        self.device.as_ref()
    }
    /// Index of the selected stream in the open device's streams, if any.
    pub fn stream_index(&self) -> Option<usize> {
        self.stream_index
    }
    /// Selects a stream; ignored while a capture is running.
    pub fn set_stream_index(&mut self, index: usize) {
        // ignore request if a stream has already started
        if !matches!(
            self.status.load(atomic::Ordering::Relaxed),
            CaptureStatus::Capturing
        ) {
            self.stream_index = Some(index);
        }
    }
    /// Mutable access for the auto-start checkbox in the UI.
    pub fn is_auto_start_stream_mut(&mut self) -> &mut bool {
        &mut self.is_auto_start_stream
    }
    /// Starts the selected stream on a worker thread that decodes frames,
    /// uploads them into the shared texture, and reports decode latency.
    pub fn start_capture(&mut self) -> Result<(), StartCaptureError> {
        // only start the selected stream if no other stream is running
        // (the Acquire load pairs with the worker's Release store on exit)
        if !matches!(
            self.status.load(atomic::Ordering::Acquire),
            CaptureStatus::Stopped
        ) {
            return Err(StartCaptureError::OtherStreamOpened);
        }
        let device = self
            .device
            .as_ref()
            .ok_or(StartCaptureError::NoDeviceSelected)?;
        let stream_index = self
            .stream_index
            .ok_or(StartCaptureError::NoStreamSelected)?;
        let stream_desc = device
            .streams()
            .get(stream_index)
            .ok_or(StartCaptureError::StreamNotExists)?;
        let stream = device.start_stream(stream_desc)?;
        self.status
            .store(CaptureStatus::Capturing, atomic::Ordering::Release);
        let egui_ctx = self.egui_ctx.clone();
        let mut texture_handle = self.texture_handle.clone();
        let status = self.status.clone();
        let decode_latency_tx = self.decode_latency_tx.clone();
        std::thread::spawn(move || {
            {
                // Inner scope so the stream is dropped before the final
                // status store below.
                let mut stream = stream;
                // Run until a stop is requested or the stream ends.
                while matches!(
                    status.load(atomic::Ordering::Relaxed),
                    CaptureStatus::Capturing
                ) {
                    match stream.next_frame() {
                        None => break,
                        Some(frame) => {
                            texture_handle.set(frame.image, Default::default());
                            // Report decode+upload time in milliseconds. A
                            // wall-clock step can make `duration_since`
                            // fail; the sample is then silently dropped.
                            if let Ok(dt) =
                                std::time::SystemTime::now().duration_since(frame.timestamp)
                            {
                                decode_latency_tx.send(dt.as_secs_f32() * 1000.0).ok();
                            }
                            egui_ctx.request_repaint();
                        }
                    }
                }
            }
            // stream has been dropped here
            status.store(CaptureStatus::Stopped, atomic::Ordering::Release);
            egui_ctx.request_repaint();
        });
        Ok(())
    }
    /// Requests the worker thread to stop; the worker itself transitions
    /// the status back to `Stopped` once the stream has been released.
    pub fn stop_capture(&mut self) {
        self.status
            .compare_exchange(
                CaptureStatus::Capturing.into(),
                CaptureStatus::Stopping.into(),
                atomic::Ordering::Acquire,
                atomic::Ordering::Relaxed,
            )
            .ok();
    }
    /// Current worker status.
    pub fn status(&self) -> CaptureStatus {
        self.status.load(atomic::Ordering::Relaxed)
    }
    /// Texture the worker thread writes decoded frames into.
    pub fn texture(&self) -> &egui::TextureHandle {
        &self.texture_handle
    }
    /// Drains newly reported samples into the ring buffer, then returns an
    /// iterator over the recorded decode latencies in milliseconds, from
    /// the oldest to the newest sample (`CircularArrayBuffer::iter` yields
    /// oldest first). NaN placeholders from the initial fill are skipped.
    pub fn decode_latency_history(&mut self) -> impl std::iter::Iterator<Item = f32> + '_ {
        while let Ok(dt) = self.decode_latency_rx.try_recv() {
            self.decode_latency_buffer.push(dt);
        }
        self.decode_latency_buffer
            .iter()
            .filter(|dt| !dt.is_nan())
            .copied()
    }
}
/// Borrowing snapshot of the persistable parts of `CaptureState`,
/// serialized into storage.
#[derive(Serialize)]
struct CaptureSaveState<'a> {
    device_uri: Option<&'a str>,
    stream: Option<&'a capture::SupportedStreamDescriptor>,
    is_auto_start_stream: bool,
}
/// Deserialization counterpart of `CaptureSaveState` (owns the stream
/// descriptor). Field names/layout must stay in sync with the save side.
#[derive(Deserialize)]
struct CaptureLoadState<'a> {
    device_uri: Option<&'a str>,
    stream: Option<capture::SupportedStreamDescriptor>,
    is_auto_start_stream: bool,
}
impl<'a> From<&'a CaptureState> for CaptureSaveState<'a> {
    /// Snapshots the persistable selection: the selected device's URI and
    /// the selected stream descriptor (both resolved from their indices),
    /// plus the auto-start flag.
    fn from(state: &'a CaptureState) -> Self {
        Self {
            device_uri: state
                .device_index
                .and_then(|index| state.devices().get(index))
                .map(|device| device.uri.as_str()),
            stream: state.stream_index.and_then(|stream_index| {
                state
                    .device
                    .as_ref()
                    .and_then(|device| device.streams().get(stream_index))
            }),
            is_auto_start_stream: state.is_auto_start_stream,
        }
    }
}
impl CaptureState {
    const STORAGE_KEY: &'static str = "config.capture";

    /// Builds the capture state, restoring the saved device/stream
    /// selection from `storage` when possible and optionally auto-starting
    /// the capture.
    pub fn load(egui_ctx: &egui::Context, storage: Option<&dyn eframe::Storage>) -> Self {
        // Placeholder 1x1 transparent texture until the first frame arrives.
        let texture_handle = egui_ctx.load_texture(
            "captured-frame",
            egui::ColorImage::new([1, 1], egui::Color32::TRANSPARENT),
            Default::default(),
        );
        let (decode_latency_tx, decode_latency_rx) = mpsc::channel();
        let mut me = Self {
            capture_ctx: Default::default(),
            device_index: None,
            device: None,
            stream_index: None,
            is_auto_start_stream: false,
            status: Arc::new(Default::default()),
            // texture
            texture_handle,
            egui_ctx: egui_ctx.clone(),
            // decode latency
            // NaN marks slots that never received a sample.
            decode_latency_buffer: crate::util::CircularArrayBuffer::from([f32::NAN; 32]),
            decode_latency_tx,
            decode_latency_rx,
        };
        if let Some(save_state) = storage
            .and_then(|storage| storage.get_string(Self::STORAGE_KEY))
            .as_deref()
            .and_then(|s| {
                ron::from_str::<CaptureLoadState>(s)
                    .inspect_err(|err| log::warn!("Failed to parse CaptureConfig: {}", err))
                    .ok()
            })
        {
            me.is_auto_start_stream = save_state.is_auto_start_stream;
            if let Some(device_uri) = save_state.device_uri {
                // `position` replaces the former `enumerate` + `find_map`
                // chain — same result, clearer intent.
                if let Some(device_index) = me
                    .capture_ctx
                    .devices()
                    .iter()
                    .position(|device| device.uri == device_uri)
                {
                    if let Ok(device) = me.capture_ctx.open_device(device_uri) {
                        // try stream
                        if let Some(saved_stream) = save_state.stream {
                            if let Some(stream_index) = device
                                .streams()
                                .iter()
                                .position(|stream| stream == &saved_stream)
                            {
                                me.stream_index = Some(stream_index);
                            }
                        }
                        // set device (the set is performed here due to ownership problems)
                        me.device = Some(device);
                        me.device_index = Some(device_index);
                        if me.is_auto_start_stream {
                            me.start_capture()
                                .inspect_err(|err| {
                                    log::warn!("Failed to auto start capture: {err}")
                                })
                                .ok();
                        }
                    }
                }
            }
        }
        me
    }

    /// Serializes the persistable selection into storage; failures are logged.
    pub fn save(&self, storage: &mut dyn Storage) {
        match ron::to_string(&CaptureSaveState::from(self)) {
            Ok(s) => storage.set_string(Self::STORAGE_KEY, s),
            Err(err) => log::warn!("Failed to save CaptureConfig: {}", err),
        }
    }
}

View file

@ -0,0 +1,39 @@
use eframe::Storage;
use serde::{Deserialize, Serialize};
/// UI state of the menu, persisted across runs.
#[derive(Deserialize, Serialize)]
pub struct MenuState {
    // Whether the config window is currently shown.
    pub is_config_opened: bool,
}
impl Default for MenuState {
    /// The config window starts opened (relevant on first run, before any
    /// state has been persisted).
    fn default() -> Self {
        Self {
            is_config_opened: true,
        }
    }
}
impl MenuState {
    const STORAGE_KEY: &'static str = "state.menu";
    // TODO partial
    /// Restores the menu state from storage; a missing or unparsable entry
    /// falls back to the default.
    pub fn load(storage: Option<&dyn eframe::Storage>) -> Self {
        let Some(serialized) = storage.and_then(|storage| storage.get_string(Self::STORAGE_KEY))
        else {
            return Self::default();
        };
        match ron::from_str::<MenuState>(&serialized) {
            Ok(state) => state,
            Err(err) => {
                log::warn!("Failed to parse MenuState: {}", err);
                Self::default()
            }
        }
    }
    /// Serializes the menu state into storage; failures are logged.
    pub fn save(&self, storage: &mut dyn Storage) {
        match ron::to_string(&self) {
            Err(err) => log::warn!("Failed to save MenuState: {}", err),
            Ok(s) => storage.set_string(Self::STORAGE_KEY, s),
        }
    }
}

View file

@ -0,0 +1,25 @@
//! Application state, split per concern (capture, menu).
mod capture;
mod menu;
pub use capture::{CaptureState, CaptureStatus};
pub use menu::MenuState;
/// Root application state: one sub-state per concern, each responsible
/// for its own persistence.
pub struct AppState {
    pub capture: CaptureState,
    pub menu: MenuState,
}
use eframe::Storage;
impl AppState {
    /// Restores all sub-states from storage (each uses its own key).
    pub fn load(ctx: &eframe::egui::Context, storage: Option<&dyn Storage>) -> Self {
        Self {
            capture: CaptureState::load(ctx, storage),
            menu: MenuState::load(storage),
        }
    }
    /// Persists all sub-states.
    pub fn save(&self, storage: &mut dyn Storage) {
        self.capture.save(storage);
        self.menu.save(storage);
    }
}

View file

@ -0,0 +1,102 @@
use crate::{state::CaptureState, AppState};
use eframe::egui;
/// The application's config window; currently hosts only the capture
/// configuration section.
pub struct ConfigWindow {
    capture_config_section: CaptureConfigSection,
}
impl ConfigWindow {
    pub fn new() -> Self {
        Self {
            capture_config_section: CaptureConfigSection::new(),
        }
    }
    /// Renders the window. The capture section is disabled (greyed out)
    /// unless capturing is fully stopped.
    pub fn show(&mut self, ctx: &egui::Context, state: &mut AppState) {
        egui::Window::new("Config").show(ctx, |ui| {
            ui.add_enabled_ui(
                matches!(state.capture.status(), crate::state::CaptureStatus::Stopped),
                |ui| {
                    self.capture_config_section.show(ui, &mut state.capture);
                },
            );
        });
    }
}
/// The "Video Capture" section of the config window.
pub struct CaptureConfigSection {
    // Last error message to display; empty string means no message.
    message: String,
}

impl CaptureConfigSection {
    pub fn new() -> Self {
        Self {
            // `String::new()` over `"".to_owned()`: no allocation, clearer intent.
            message: String::new(),
        }
    }
}
impl CaptureConfigSection {
    /// Renders the device and stream pickers plus the auto-start toggle,
    /// applying any selection change back to `state`.
    pub fn show(&mut self, ui: &mut egui::Ui, state: &mut CaptureState) {
        ui.heading("Video Capture");
        let devices = state.devices();
        // Seeded with the current selection so re-clicking it is a no-op.
        let mut selected_device_index = state.device_index();
        egui::ComboBox::new("capture-device-list", "Device")
            .selected_text(
                state
                    .device_index()
                    .and_then(|index| devices.get(index))
                    .map(ToString::to_string)
                    .unwrap_or_default(),
            )
            .show_ui(ui, |ui| {
                for (device_index, device) in devices.iter().enumerate() {
                    ui.selectable_value(
                        &mut selected_device_index,
                        Some(device_index),
                        device.to_string(),
                    );
                }
            });
        // update selected device (set_device_index ignores re-selecting
        // the already opened device; failures are surfaced via `message`)
        if let Some(index) = selected_device_index {
            if let Err(err) = state.set_device_index(index) {
                self.message = err.to_string();
            }
        }
        if let Some(device) = state.device() {
            let streams = device.streams();
            // `None` until the user clicks an entry during this frame.
            let mut selected_stream_index = None;
            egui::ComboBox::new("capture-device-stream-list", "Stream")
                .selected_text(
                    state
                        .stream_index()
                        .and_then(|index| streams.get(index))
                        .map(ToString::to_string)
                        .unwrap_or_default(),
                )
                .show_ui(ui, |ui| {
                    for (stream_index, stream) in streams.iter().enumerate() {
                        ui.selectable_value(
                            &mut selected_stream_index,
                            Some(stream_index),
                            stream.to_string(),
                        );
                    }
                    // Unsupported streams are listed but not selectable.
                    for unsupported_stream in device.unsupported_streams() {
                        ui.label(format!("[Unsupported] {}", unsupported_stream));
                    }
                });
            // update selected stream
            if let Some(index) = selected_stream_index {
                state.set_stream_index(index);
            }
        }
        ui.toggle_value(state.is_auto_start_stream_mut(), "Start capturing automatically");
        if !self.message.is_empty() {
            ui.colored_label(egui::Color32::DARK_RED, &self.message);
        }
    }
}

View file

@ -0,0 +1,41 @@
use crate::state::CaptureStatus;
use eframe::egui;
/// The top menu bar (stateless; all mutable state lives in `AppState`).
pub struct MenuBar;
impl MenuBar {
    pub fn new() -> Self {
        Self
    }
    /// Renders the "Capture" menu: the config-window toggle plus a
    /// start/stop button matching the current capture status.
    pub fn show(&mut self, ui: &mut egui::Ui, state: &mut crate::state::AppState) {
        ui.menu_button("Capture", |ui| {
            ui.toggle_value(&mut state.menu.is_config_opened, "Config");
            match state.capture.status() {
                CaptureStatus::Stopped => {
                    if ui.button("Start").clicked() {
                        if let Err(err) = state.capture.start_capture() {
                            log::warn!("Failed to start capture: {}", err);
                            // TODO show message
                            // state.capture_message = err.to_string();
                        } else {
                            // state.capture_message.clear();
                        }
                        ui.close_menu();
                    }
                }
                CaptureStatus::Capturing => {
                    if ui.button("Stop").clicked() {
                        state.capture.stop_capture();
                        ui.close_menu();
                    }
                }
                CaptureStatus::Stopping => {
                    // Transitional state: the worker thread is shutting
                    // down, so the button is disabled.
                    ui.add_enabled(false, egui::Button::new("Stopping..."));
                }
            };
            // ui.colored_label(egui::Color32::DARK_RED, &self.capture_message);
        });
    }
}

View file

@ -0,0 +1,5 @@
//! UI widgets: the config window and the top menu bar.
mod config_window;
mod menu_bar;
pub use config_window::ConfigWindow;
pub use menu_bar::MenuBar;

View file

@ -0,0 +1,34 @@
use std::marker::PhantomData;
use std::sync::atomic::{AtomicU8, Ordering};

/// An atomic cell for any enum `T` that round-trips through `u8`.
///
/// The value is stored as its `u8` representation; `load`/`store` convert
/// at the boundary via the `From` impls.
pub struct AtomicU8Enum<T: From<u8> + Into<u8>> {
    inner: AtomicU8,
    phantom: PhantomData<T>,
}

impl<T: From<u8> + Into<u8>> AtomicU8Enum<T> {
    /// Atomically reads the current value.
    pub fn load(&self, order: Ordering) -> T {
        T::from(self.inner.load(order))
    }

    /// Atomically replaces the current value.
    pub fn store(&self, val: T, order: Ordering) {
        self.inner.store(val.into(), order);
    }
}

impl<T: From<u8> + Into<u8> + Default> Default for AtomicU8Enum<T> {
    fn default() -> Self {
        Self::from(T::default())
    }
}

impl<T: From<u8> + Into<u8>> std::ops::Deref for AtomicU8Enum<T> {
    // Exposes the raw atomic for operations such as `compare_exchange`.
    // Note this leaks the `u8` representation to callers.
    type Target = AtomicU8;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl<T: From<u8> + Into<u8>> From<T> for AtomicU8Enum<T> {
    fn from(value: T) -> Self {
        Self {
            inner: AtomicU8::new(value.into()),
            phantom: PhantomData,
        }
    }
}

View file

@ -0,0 +1,32 @@
/// A fixed-capacity ring buffer backed by an inline array.
///
/// Once the buffer has wrapped, `push` overwrites the oldest element.
pub struct CircularArrayBuffer<T, const N: usize> {
    inner: [T; N],
    // Slot the next `push` writes to (it holds the oldest element).
    next_index: usize,
}

impl<T, const N: usize> CircularArrayBuffer<T, N> {
    /// Overwrites the oldest slot with `value`.
    pub fn push(&mut self, value: T) {
        let index = self.next_index;
        self.inner[index] = value;
        self.next_index = if index + 1 == N { 0 } else { index + 1 };
    }

    /// Iterates the elements from oldest to newest.
    pub fn iter(&self) -> std::iter::Chain<std::slice::Iter<'_, T>, std::slice::Iter<'_, T>> {
        let (newest, oldest) = self.inner.split_at(self.next_index);
        oldest.iter().chain(newest.iter())
    }
}

impl<T, const N: usize> From<[T; N]> for CircularArrayBuffer<T, N> {
    /// Wraps `inner`, treating index 0 as the oldest slot.
    fn from(inner: [T; N]) -> Self {
        Self {
            inner,
            next_index: 0,
        }
    }
}

impl<T, const N: usize> std::ops::Deref for CircularArrayBuffer<T, N> {
    type Target = [T; N];

    // Read-only access to the raw (unrotated) storage.
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

View file

@ -0,0 +1,33 @@
/// Derives `From<$repr_ty>` and `From<$enum> for $repr_ty` for a C-like enum.
///
/// Unknown discriminant values map to the enum's `Default` variant, so the
/// enum must also implement `Default`. This makes the `From<$repr_ty>`
/// conversion total, which suits `AtomicU8Enum`'s `T: From<u8>` bound.
#[macro_export]
macro_rules! derive_from_repr {
    (
        #[repr($repr_ty:ty)]
        $(#[$enum_attrs:meta])*
        $enum_vis:vis enum $enum_ident:ident {
            $($(#[$variant_attrs:meta])* $variant_ident:ident = $variant_value:literal),*
            $(,)?
        }
    ) => {
        // Re-emit the enum exactly as written (attributes, repr, variants).
        $(#[$enum_attrs])*
        #[repr($repr_ty)]
        $enum_vis enum $enum_ident {
            $($(#[$variant_attrs])* $variant_ident = $variant_value),*
        }
        impl ::std::convert::From<$repr_ty> for $enum_ident {
            fn from(value: $repr_ty) -> $enum_ident {
                match value {
                    $($variant_value => $enum_ident::$variant_ident),*,
                    // Unknown raw values fall back to the default variant.
                    _ => ::std::default::Default::default(),
                }
            }
        }
        impl ::std::convert::From<$enum_ident> for $repr_ty {
            fn from(value: $enum_ident) -> $repr_ty {
                match value {
                    $($enum_ident::$variant_ident => $variant_value),*
                }
            }
        }
    };
}

View file

@ -0,0 +1,6 @@
//! Small generic utilities shared across the crate.
mod atomic;
mod circular_buffer;
mod derive;
pub use atomic::AtomicU8Enum;
pub use circular_buffer::CircularArrayBuffer;