Initial commit of p2p-chat GUI with native audio and screen sharing

mixa
2026-02-18 11:51:00 +03:00
parent a3cb75489a
commit 65b3f22dae
16 changed files with 6559 additions and 695 deletions

Cargo.lock (generated, 4138 lines changed): diff suppressed because it is too large

@@ -32,7 +32,7 @@ anyhow = "1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4", features = ["derive"] }
rand = "0.8"
rand = "0.9"
# Configuration
toml = "0.7"
@@ -42,6 +42,8 @@ directories = "5.0"
songbird = { version = "0.4", features = ["builtin-queue"] }
audiopus = "0.2"
rfd = "0.14"
iced = "0.13"
iced_futures = "0.13"
crossbeam-channel = "0.5"
axum = { version = "0.8.8", features = ["ws"] }
@@ -51,6 +53,12 @@ futures = "0.3.31"
tower-http = { version = "0.6.8", features = ["fs", "cors"] }
mime_guess = "2.0.5"
hex = "0.4.3"
cpal = { version = "0.17.1", features = ["jack"] }
xcap = "0.8.2"
image = "0.25.9"
ringbuf = "0.4.8"
nnnoiseless = "0.5"
dashmap = "5"
[profile.dev]
opt-level = 0


@@ -1,11 +1,64 @@
use crate::chat::ChatState;
use crate::chat::{ChatEntry, ChatState};
use crate::config::AppConfig;
use crate::file_transfer::FileTransferManager;
use crate::media::MediaState;
use crate::net::{NetEvent, NetworkManager};
use crate::net::{NetEvent, NetworkManager, PeerInfo};
use crate::protocol::{self, GossipMessage};
use crate::tui::TuiCommand;
use anyhow::Result;
use std::path::PathBuf;
#[derive(Debug, Clone)]
pub struct FrontendState {
pub chat_history: Vec<ChatEntry>,
pub peers: Vec<PeerInfo>,
pub our_name: String,
pub our_id: String,
pub our_id_full: String,
pub media_status: String,
pub input_device_name: Option<String>,
pub output_device_name: Option<String>,
pub master_volume: f32,
pub noise_suppression: bool,
}
impl Default for FrontendState {
fn default() -> Self {
Self {
chat_history: Vec::new(),
peers: Vec::new(),
our_name: "Unknown".to_string(),
our_id: "".to_string(),
our_id_full: "".to_string(),
media_status: "".to_string(),
input_device_name: None,
output_device_name: None,
master_volume: 1.0,
noise_suppression: true,
}
}
}
#[derive(Debug)]
pub enum AppCommand {
SendMessage(String),
/// Local-only system message (not broadcast to peers).
SystemMessage(String),
SendFile(PathBuf),
AcceptFile(String), // file_id prefix
ChangeNick(String),
Connect(String),
ToggleVoice,
ToggleScreen,
SetInputDevice(String),
SetOutputDevice(String),
SetMasterVolume(f32),
ToggleNoiseCancel,
SetBitrate(u32),
Leave,
Quit,
None,
}
pub struct AppLogic {
pub chat: ChatState,
@@ -37,29 +90,60 @@ impl AppLogic {
}
}
pub async fn handle_tui_command(&mut self, cmd: TuiCommand) -> Result<bool> {
pub async fn handle_command(&mut self, cmd: AppCommand) -> Result<bool> {
match cmd {
TuiCommand::SendMessage(text) => {
AppCommand::SendMessage(text) => {
if let Err(e) = self.chat.send_message(text, &self.net).await {
self.chat.add_system_message(format!("Send error: {}", e));
}
}
TuiCommand::SystemMessage(text) => {
AppCommand::SystemMessage(text) => {
self.chat.add_system_message(text);
}
TuiCommand::ToggleVoice => {
AppCommand::ToggleVoice => {
let status = self.media.toggle_voice(self.net.clone()).await;
self.chat.add_system_message(status.to_string());
}
TuiCommand::ToggleCamera => {
let status = self.media.toggle_camera(self.net.clone()).await;
self.chat.add_system_message(status.to_string());
}
TuiCommand::ToggleScreen => {
AppCommand::ToggleScreen => {
let status = self.media.toggle_screen(self.net.clone()).await;
self.chat.add_system_message(status.to_string());
}
TuiCommand::Quit => {
AppCommand::SetInputDevice(device_name) => {
self.media.set_input_device(device_name.clone());
self.chat.add_system_message(format!("Microphone set to: {}", device_name));
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.input_device = Some(device_name);
let _ = cfg.save();
}
}
AppCommand::SetOutputDevice(device_name) => {
self.media.set_output_device(device_name.clone());
self.chat.add_system_message(format!("Output set to: {}", device_name));
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.output_device = Some(device_name);
let _ = cfg.save();
}
}
AppCommand::SetMasterVolume(vol) => {
self.media.set_volume(vol);
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.master_volume = vol;
let _ = cfg.save();
}
}
AppCommand::ToggleNoiseCancel => {
if let Some(enabled) = self.media.toggle_denoise() {
let status = if enabled { "enabled" } else { "disabled" };
self.chat.add_system_message(format!("Noise cancellation {}", status));
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.noise_suppression = enabled;
let _ = cfg.save();
}
} else {
self.chat.add_system_message("Voice chat not active".to_string());
}
}
AppCommand::Quit => {
// Broadcast disconnect to peers
let disconnect_msg = GossipMessage::Disconnect {
sender_name: self.our_name.clone(),
@@ -70,7 +154,7 @@ impl AppLogic {
self.media.shutdown();
return Ok(true); // Signal to quit
}
TuiCommand::ChangeNick(new_nick) => {
AppCommand::ChangeNick(new_nick) => {
let old = self.our_name.clone();
self.our_name = new_nick.clone();
self.chat.our_name = new_nick.clone();
@@ -82,7 +166,7 @@ impl AppLogic {
}
}
self.chat
.add_system_message(format!("Nickname changed: {} {}", old, new_nick));
.add_system_message(format!("Nickname changed: {} ➡️ {}", old, new_nick));
// Broadcast name change to all peers
let msg = GossipMessage::NameChange(protocol::NameChange {
old_name: old,
@@ -90,7 +174,7 @@ impl AppLogic {
});
let _ = self.net.broadcast(&msg).await;
}
TuiCommand::Connect(peer_id_str) => {
AppCommand::Connect(peer_id_str) => {
match peer_id_str.parse::<crate::net::EndpointId>() {
Ok(peer_id) => {
self.chat
@@ -109,7 +193,7 @@ impl AppLogic {
}
}
}
TuiCommand::SendFile(path) => {
AppCommand::SendFile(path) => {
self.chat
.add_system_message(format!("Preparing to send file: {:?}", path));
if !path.exists() {
@@ -144,7 +228,7 @@ impl AppLogic {
}
}
}
TuiCommand::Leave => {
AppCommand::Leave => {
self.chat
.add_system_message("Leaving group chat...".to_string());
self.media.shutdown();
@@ -158,7 +242,7 @@ impl AppLogic {
);
}
TuiCommand::SetBitrate(bps) => {
AppCommand::SetBitrate(bps) => {
self.media.set_bitrate(bps);
self.chat
.add_system_message(format!("🎵 Bitrate set to {} kbps", bps / 1000));
@@ -169,23 +253,25 @@ impl AppLogic {
}
}
TuiCommand::AcceptFile(prefix) => {
AppCommand::AcceptFile(prefix) => {
// Find matching transfer
let transfers = self.file_mgr.transfers.lock().unwrap();
let mut matched = None;
for (id, info) in transfers.iter() {
let id_str = hex::encode(id);
if id_str.starts_with(&prefix) {
if matched.is_some() {
self.chat
.add_system_message(format!("Ambiguous prefix '{}'", prefix));
matched = None;
break;
let matched = {
let transfers = self.file_mgr.transfers.lock().unwrap();
let mut matched = None;
for (id, info) in transfers.iter() {
let id_str = hex::encode(id);
if id_str.starts_with(&prefix) {
if matched.is_some() {
self.chat
.add_system_message(format!("Ambiguous prefix '{}'", prefix));
matched = None;
break;
}
matched = Some((*id, info.clone()));
}
matched = Some((*id, info.clone()));
}
}
drop(transfers);
matched
};
if let Some((_id, info)) = matched {
if let Some(peer_id) = info.peer {
@@ -221,7 +307,7 @@ impl AppLogic {
.add_system_message(format!("No transfer found matching '{}'", prefix));
}
}
TuiCommand::None => {}
AppCommand::None => {}
}
Ok(false) // Do not quit
}
@@ -418,4 +504,31 @@ impl AppLogic {
}
}
}
pub async fn get_frontend_state(&self) -> FrontendState {
let peers_map = self.net.peers.lock().await;
let mut peers: Vec<PeerInfo> = peers_map.values().cloned().collect();
peers.sort_by(|a, b| a.id.to_string().cmp(&b.id.to_string()));
// Sync audio levels
let levels = self.media.get_peer_levels();
for peer in &mut peers {
if let Some(level) = levels.get(&peer.id) {
peer.audio_level = *level;
}
}
FrontendState {
chat_history: self.chat.history.clone(),
peers,
our_name: self.our_name.clone(),
our_id: self.our_id_short.clone(),
our_id_full: self.net.our_id.to_string(),
media_status: self.media.status_line(),
input_device_name: self.media.input_device.clone(),
output_device_name: self.media.output_device.clone(),
master_volume: self.media.get_volume(),
noise_suppression: self.media.is_denoise_enabled(),
}
}
}


@@ -5,7 +5,7 @@ use crate::protocol::{ChatMessage, GossipMessage};
use anyhow::Result;
/// Stored chat entry with display metadata.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, serde::Serialize)]
pub struct ChatEntry {
pub sender_name: String,
pub timestamp: u64,


@@ -61,16 +61,36 @@ impl Default for NetworkConfig {
pub struct MediaConfig {
#[serde(default = "default_bitrate")]
pub mic_bitrate: u32,
#[serde(default)]
pub input_device: Option<String>,
#[serde(default)]
pub output_device: Option<String>,
#[serde(default = "default_volume")]
pub master_volume: f32,
#[serde(default = "default_true")]
pub noise_suppression: bool,
}
fn default_bitrate() -> u32 {
128000
}
fn default_volume() -> f32 {
1.0
}
fn default_true() -> bool {
true
}
impl Default for MediaConfig {
fn default() -> Self {
Self {
mic_bitrate: 128000,
input_device: None,
output_device: None,
master_volume: 1.0,
noise_suppression: true,
}
}
}
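Because every new field carries a serde default, configuration files written before this change still deserialize. A minimal sketch of that behavior, assuming MediaConfig derives serde::Deserialize as the attributes imply, using the toml crate already in Cargo.toml:

#[test]
fn old_configs_still_parse() {
    // A config file that predates the new keys:
    let old_file = "mic_bitrate = 96000";
    let cfg: MediaConfig = toml::from_str(old_file).expect("old configs should still parse");
    assert_eq!(cfg.mic_bitrate, 96_000);
    assert_eq!(cfg.master_volume, 1.0); // default_volume()
    assert!(cfg.noise_suppression); // default_true()
    assert!(cfg.input_device.is_none() && cfg.output_device.is_none());
}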

src/gui.rs (new file, 610 lines)

@@ -0,0 +1,610 @@
use iced::widget::{
button, checkbox, column, container, pick_list, row, scrollable, slider, text, text_input, Column,
};
use iced::{
Alignment, Background, Border, Color, Element, Length, Subscription, Task, Theme,
};
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use futures::stream;
use crate::app_logic::{AppCommand, FrontendState};
use crate::net::PeerInfo;
use crate::chat::ChatEntry;
use chrono::{DateTime, Local, TimeZone, Utc};
// Discord-like Colors
const BG_DARK: Color = Color::from_rgb(0.21, 0.22, 0.25); // #36393f
const SIDEBAR_DARK: Color = Color::from_rgb(0.18, 0.19, 0.21); // #2f3136
const INPUT_BG: Color = Color::from_rgb(0.25, 0.27, 0.29); // #40444b
const TEXT_COLOR: Color = Color::from_rgb(0.86, 0.86, 0.86); // #dcddde
const MUTED_TEXT: Color = Color::from_rgb(0.45, 0.46, 0.48); // #72767d
pub struct ChatApp {
state: FrontendState,
input_value: String,
command_sender: mpsc::Sender<AppCommand>,
// We keep the receiver in the struct to use it in subscription
state_receiver: Arc<Mutex<mpsc::Receiver<FrontendState>>>,
// Voice Chat State
input_devices: Vec<String>,
selected_device: Option<String>,
output_devices: Vec<String>,
selected_output_device: Option<String>,
is_in_voice: bool,
master_volume: f32,
noise_cancel_enabled: bool,
}
#[derive(Debug, Clone)]
pub enum Message {
InputChanged(String),
SendMessage,
BackendUpdate(FrontendState),
// Voice
InputDeviceSelected(String),
OutputDeviceSelected(String),
ToggleVoice,
ToggleScreen,
RefreshDevices,
MasterVolumeChanged(f32),
ToggleNoiseCancel(bool),
CopyText(String),
NoOp,
}
pub struct Flags {
pub initial_state: FrontendState,
pub command_sender: mpsc::Sender<AppCommand>,
pub state_receiver: mpsc::Receiver<FrontendState>,
}
impl ChatApp {
pub fn new(flags: Flags) -> (Self, Task<Message>) {
let master_volume = flags.initial_state.master_volume;
let noise_cancel_enabled = flags.initial_state.noise_suppression;
(
Self {
state: flags.initial_state,
input_value: String::new(),
command_sender: flags.command_sender,
state_receiver: Arc::new(Mutex::new(flags.state_receiver)),
input_devices: Vec::new(),
selected_device: None,
output_devices: Vec::new(),
selected_output_device: None,
is_in_voice: false,
master_volume,
noise_cancel_enabled,
},
Task::perform(async {}, |_| Message::RefreshDevices),
)
}
pub fn title(&self) -> String {
format!("P2P Chat - {} ({})", self.state.our_name, self.state.our_id)
}
pub fn update(&mut self, message: Message) -> Task<Message> {
match message {
Message::InputChanged(value) => {
self.input_value = value;
Task::none()
}
Message::SendMessage => {
let input_text = self.input_value.trim().to_string();
if input_text.is_empty() {
return Task::none();
}
self.input_value.clear();
// Simple command parsing
let command = if input_text.starts_with('/') {
let parts: Vec<&str> = input_text.split_whitespace().collect();
match parts.as_slice() {
["/nick", name] | ["/name", name] => {
Some(AppCommand::ChangeNick(name.to_string()))
}
["/connect", id] | ["/join", id] => {
Some(AppCommand::Connect(id.to_string()))
}
["/voice"] => Some(AppCommand::ToggleVoice),
["/quit"] => Some(AppCommand::Quit),
_ => Some(AppCommand::SystemMessage(format!("Unknown command: {}", input_text))),
}
} else {
Some(AppCommand::SendMessage(input_text.clone()))
};
let sender = self.command_sender.clone();
Task::perform(
async move {
if let Some(cmd) = command {
let _ = sender.send(cmd).await;
}
},
|_| Message::InputChanged(String::new()),
)
}
Message::BackendUpdate(new_state) => {
// FrontendState has no explicit voice flag, so infer it from the media_status string.
self.is_in_voice = new_state.media_status.contains("🎤 LIVE");
// Update selected devices from backend state if they are set there
if let Some(dev) = &new_state.input_device_name {
if self.selected_device.as_ref() != Some(dev) {
self.selected_device = Some(dev.clone());
}
}
if let Some(dev) = &new_state.output_device_name {
if self.selected_output_device.as_ref() != Some(dev) {
self.selected_output_device = Some(dev.clone());
}
}
self.state = new_state;
Task::none()
}
Message::InputDeviceSelected(device) => {
self.selected_device = Some(device.clone());
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::SetInputDevice(device)).await;
},
|_| Message::NoOp,
)
}
Message::OutputDeviceSelected(device) => {
self.selected_output_device = Some(device.clone());
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::SetOutputDevice(device)).await;
},
|_| Message::NoOp,
)
}
Message::ToggleVoice => {
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::ToggleVoice).await;
},
|_| Message::NoOp,
)
}
Message::ToggleScreen => {
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::ToggleScreen).await;
},
|_| Message::NoOp,
)
}
Message::MasterVolumeChanged(vol) => {
self.master_volume = vol;
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::SetMasterVolume(vol)).await;
},
|_| Message::NoOp,
)
}
Message::ToggleNoiseCancel(enabled) => {
self.noise_cancel_enabled = enabled;
let sender = self.command_sender.clone();
Task::perform(
async move {
let _ = sender.send(AppCommand::ToggleNoiseCancel).await;
},
|_| Message::NoOp,
)
}
Message::RefreshDevices => {
// Use the improved device filtering from MediaState logic
use cpal::traits::{HostTrait, DeviceTrait};
// Prioritize JACK if available
let available_hosts = cpal::available_hosts();
let mut hosts = Vec::new();
if available_hosts.contains(&cpal::HostId::Jack) {
hosts.push(cpal::host_from_id(cpal::HostId::Jack).unwrap());
}
hosts.push(cpal::default_host());
let mut input_names = Vec::new();
let mut output_names = Vec::new();
for host in &hosts {
if let Ok(devices) = host.input_devices() {
for device in devices {
if let Ok(name) = device.name() {
if name.contains("dmix") || name.contains("dsnoop") || name.contains("null") {
continue;
}
let clean_name = if let Some(start) = name.find("CARD=") {
let rest = &name[start + 5..];
let card_name = rest.split(',').next().unwrap_or(rest);
let prefix = name.split(':').next().unwrap_or("Unknown");
format!("{} ({})", card_name, prefix)
} else if name.contains("HDA Intel PCH") {
name
} else {
name
};
input_names.push(clean_name);
}
}
}
if let Ok(devices) = host.output_devices() {
for device in devices {
if let Ok(name) = device.name() {
if name.contains("dmix") || name.contains("dsnoop") || name.contains("null") {
continue;
}
let clean_name = if let Some(start) = name.find("CARD=") {
let rest = &name[start + 5..];
let card_name = rest.split(',').next().unwrap_or(rest);
let prefix = name.split(':').next().unwrap_or("Unknown");
format!("{} ({})", card_name, prefix)
} else {
name
};
output_names.push(clean_name);
}
}
}
}
input_names.sort();
input_names.dedup();
output_names.sort();
output_names.dedup();
self.input_devices = input_names;
self.output_devices = output_names;
if self.selected_device.is_none() && !self.input_devices.is_empty() {
// Pre-select first
self.selected_device = Some(self.input_devices[0].clone());
// We don't auto-send command to avoid loop, user must select or we wait for backend state
}
if self.selected_output_device.is_none() && !self.output_devices.is_empty() {
self.selected_output_device = Some(self.output_devices[0].clone());
}
Task::none()
}
Message::CopyText(text) => {
iced::clipboard::write(text)
}
Message::NoOp => Task::none(),
}
}
pub fn view(&self) -> Element<Message> {
// Chat Area
let chat_content = self.state.chat_history.iter().fold(
Column::new().spacing(10).padding(20),
|column, entry| column.push(view_chat_entry(entry)),
);
let chat_scroll = scrollable(chat_content)
.height(Length::Fill)
.width(Length::Fill)
.id(scrollable::Id::new("chat_scroll"));
// Input Area
let input = text_input("Message #general", &self.input_value)
.on_input(Message::InputChanged)
.on_submit(Message::SendMessage)
.padding(12)
.style(|_theme, status| {
text_input::Style {
background: Background::Color(INPUT_BG),
border: Border {
radius: 8.0.into(),
width: 0.0,
color: Color::TRANSPARENT,
},
icon: Color::WHITE,
placeholder: MUTED_TEXT,
value: TEXT_COLOR,
selection: Color::from_rgb(0.4, 0.5, 0.8),
}
});
let input_container = container(input)
.padding(15)
.style(|_theme: &Theme| container::Style {
background: Some(Background::Color(BG_DARK)),
..Default::default()
});
// Sidebar (Peers + Voice)
let identity_section = column![
text("MY IDENTITY").size(12).style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) }),
text(&self.state.our_name).size(16).style(|_theme: &Theme| text::Style { color: Some(TEXT_COLOR) }),
row![
text_input("My ID", &self.state.our_id)
.padding(5)
.size(12)
.on_input(|_| Message::NoOp)
.style(|_theme, _status| text_input::Style {
background: Background::Color(Color::from_rgb(0.15, 0.16, 0.18)),
border: Border { radius: 4.0.into(), ..Default::default() },
value: MUTED_TEXT,
placeholder: MUTED_TEXT,
selection: Color::from_rgb(0.4, 0.5, 0.8),
icon: Color::TRANSPARENT,
}),
button(text("Copy").size(12))
.on_press(Message::CopyText(self.state.our_id_full.clone()))
.padding(5)
.style(|_theme, _status| button::Style {
background: Some(Background::Color(Color::from_rgb(0.3, 0.3, 0.35))),
text_color: Color::WHITE,
border: Border { radius: 4.0.into(), ..Default::default() },
..Default::default()
})
].spacing(5)
].spacing(5).padding(10);
let identity_container = container(identity_section)
.style(|_theme: &Theme| container::Style {
background: Some(Background::Color(Color::from_rgb(0.15, 0.16, 0.18))),
..Default::default()
});
let peers_title = text("ONLINE").size(12).style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) });
let peers_content = self.state.peers.iter().fold(
Column::new().spacing(5),
|column, peer| column.push(view_peer(peer)),
);
let voice_section = column![
text("VOICE CONNECTED").size(12).style(|_theme: &Theme| text::Style {
color: Some(if self.is_in_voice { Color::from_rgb(0.4, 0.8, 0.4) } else { MUTED_TEXT })
}),
text("Input Device").size(10).style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) }),
pick_list(
self.input_devices.clone(),
self.selected_device.clone(),
Message::InputDeviceSelected
).text_size(12).padding(5),
text("Output Device").size(10).style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) }),
pick_list(
self.output_devices.clone(),
self.selected_output_device.clone(),
Message::OutputDeviceSelected
).text_size(12).padding(5),
button(
text(if self.is_in_voice { "Disconnect" } else { "Join Voice" }).size(14)
)
.on_press(Message::ToggleVoice)
.padding(8)
.style(move |_theme, _status| {
let bg = if self.is_in_voice { Color::from_rgb(0.8, 0.3, 0.3) } else { Color::from_rgb(0.3, 0.6, 0.4) };
button::Style {
background: Some(Background::Color(bg)),
text_color: Color::WHITE,
border: Border { radius: 4.0.into(), ..Default::default() },
..Default::default()
}
})
.width(Length::Fill),
button(
text(if self.state.media_status.contains("🖥 LIVE") { "Stop Screen" } else { "Share Screen" }).size(14)
)
.on_press(Message::ToggleScreen)
.padding(8)
.style(move |_theme, _status| {
let is_sharing = self.state.media_status.contains("🖥 LIVE");
let bg = if is_sharing { Color::from_rgb(0.8, 0.3, 0.3) } else { Color::from_rgb(0.3, 0.4, 0.6) };
button::Style {
background: Some(Background::Color(bg)),
text_color: Color::WHITE,
border: Border { radius: 4.0.into(), ..Default::default() },
..Default::default()
}
})
.width(Length::Fill),
// Audio Controls
text("Master Volume").size(10).style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) }),
slider(0.0..=2.0, self.master_volume, Message::MasterVolumeChanged).step(0.05),
checkbox("Noise Cancellation", self.noise_cancel_enabled)
.on_toggle(Message::ToggleNoiseCancel)
.text_size(12)
.style(|_theme, _status| checkbox::Style {
background: Background::Color(INPUT_BG),
icon_color: Color::WHITE,
border: Border { radius: 4.0.into(), ..Default::default() },
text_color: Some(TEXT_COLOR),
}),
].spacing(10).padding(10);
let voice_panel = container(voice_section)
.style(|_theme: &Theme| container::Style {
background: Some(Background::Color(Color::from_rgb(0.15, 0.16, 0.18))), // Darker panel at bottom
..Default::default()
});
let sidebar = container(
column![
identity_container,
column![peers_title, peers_content].spacing(10).padding(10).height(Length::Fill),
voice_panel
]
)
.width(Length::Fixed(240.0))
.height(Length::Fill)
.style(|_theme: &Theme| container::Style {
background: Some(Background::Color(SIDEBAR_DARK)),
..Default::default()
});
// Main Layout
let main_content = column![chat_scroll, input_container]
.width(Length::Fill)
.height(Length::Fill);
let layout = row![sidebar, main_content]
.width(Length::Fill)
.height(Length::Fill);
container(layout)
.width(Length::Fill)
.height(Length::Fill)
.style(|_theme: &Theme| container::Style {
background: Some(Background::Color(BG_DARK)),
text_color: Some(TEXT_COLOR),
..Default::default()
})
.into()
}
pub fn subscription(&self) -> Subscription<Message> {
struct BackendSubscription;
let receiver = self.state_receiver.clone();
Subscription::run_with_id(
std::any::TypeId::of::<BackendSubscription>(),
stream::unfold(receiver, |receiver| async move {
let mut guard = receiver.lock().await;
if let Some(state) = guard.recv().await {
Some((Message::BackendUpdate(state), receiver.clone()))
} else {
// Channel closed: sleep briefly to avoid a hot loop before emitting a default state
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
Some((Message::BackendUpdate(FrontendState::default()), receiver.clone()))
}
})
)
}
pub fn theme(&self) -> Theme {
Theme::Dark
}
}
// Entry point: build and run the Iced application.
pub fn run(flags: Flags) -> iced::Result {
iced::application(
ChatApp::title,
ChatApp::update,
ChatApp::view
)
.subscription(ChatApp::subscription)
.theme(ChatApp::theme)
.run_with(move || ChatApp::new(flags))
}
fn view_chat_entry(entry: &ChatEntry) -> Element<Message> {
let sender_color = if entry.is_self {
Color::from_rgb8(200, 200, 255)
} else if entry.is_system {
Color::from_rgb8(255, 100, 100)
} else {
Color::from_rgb8(100, 200, 100)
};
let sender = text(&entry.sender_name)
.style(move |_theme: &Theme| text::Style { color: Some(sender_color) })
.font(iced::font::Font::DEFAULT) // Sans-serif
.size(15);
let content = text(&entry.text)
.size(15)
.style(move |_theme: &Theme| text::Style { color: Some(TEXT_COLOR) });
let time = text(format_timestamp(entry.timestamp))
.size(11)
.style(move |_theme: &Theme| text::Style { color: Some(MUTED_TEXT) });
let header = row![sender, time].spacing(8).align_y(Alignment::Center);
// Rounded message bubble if needed, or just clean text like Discord
column![header, content].spacing(4).into()
}
fn view_peer(peer: &PeerInfo) -> Element<Message> {
let name = peer.name.as_deref().unwrap_or("Unknown");
// Audio activity border
let (border_width, border_color) = if peer.audio_level > 0.01 {
(2.0, Color::from_rgb(0.2, 0.8, 0.2)) // Green
} else {
(0.0, Color::TRANSPARENT)
};
let peer_info = column![
text(name).size(14).style(|_theme: &Theme| text::Style { color: Some(TEXT_COLOR) }),
row![
text(if peer.audio_level > 0.01 { "🔊" } else { "🔇" }).size(12),
text(peer.id.to_string().chars().take(8).collect::<String>())
.size(10)
.style(|_theme: &Theme| text::Style { color: Some(MUTED_TEXT) }),
].spacing(4)
].spacing(2);
let content = row![
peer_info,
button(text("Copy").size(10))
.on_press(Message::CopyText(peer.id.to_string()))
.padding(4)
.style(|_theme, _status| button::Style {
background: Some(Background::Color(Color::from_rgb(0.25, 0.26, 0.28))),
text_color: Color::WHITE,
border: Border { radius: 4.0.into(), ..Default::default() },
..Default::default()
})
]
.spacing(10)
.align_y(Alignment::Center);
container(content)
.padding(5)
.style(move |_theme: &Theme| container::Style {
background: None,
border: Border {
width: border_width,
color: border_color,
radius: 4.0.into(),
},
..Default::default()
})
.into()
}
fn format_timestamp(ts: u64) -> String {
if ts == 0 { return "".to_string(); }
match Utc.timestamp_millis_opt(ts as i64) {
chrono::LocalResult::Single(dt) => {
let local: DateTime<Local> = DateTime::from(dt);
local.format("%H:%M").to_string()
}
_ => "".to_string(),
}
}


@@ -8,6 +8,7 @@ mod app_logic;
mod chat;
mod config;
mod file_transfer;
mod gui;
mod media;
mod net;
mod protocol;
@@ -57,6 +58,10 @@ struct Cli {
/// Download directory for received files
#[arg(short, long, default_value = "~/Downloads")]
download_dir: String,
/// Launch with GUI (Iced) instead of TUI
#[arg(long)]
gui: bool,
}
#[tokio::main]
@@ -129,7 +134,13 @@ async fn main() -> Result<()> {
let file_mgr = FileTransferManager::new(download_path);
// Pass media device and audio settings from config if present
let media = MediaState::new(config.media.mic_bitrate);
let media = MediaState::new(
config.media.mic_bitrate,
config.media.input_device.clone(),
config.media.output_device.clone(),
config.media.master_volume,
config.media.noise_suppression,
);
// Initialize App with Theme
let theme = crate::config::Theme::from(config.ui.clone());
@@ -172,6 +183,7 @@ async fn main() -> Result<()> {
name: Some(cli.name.clone()),
capabilities: None,
is_self: true,
audio_level: 0.0,
},
);
}
@@ -199,7 +211,6 @@ async fn main() -> Result<()> {
tokio::spawn(crate::web::start_web_server(
media.broadcast_tx.clone(),
media.mic_broadcast.clone(),
media.cam_broadcast.clone(),
media.screen_broadcast.clone(),
));
@@ -213,6 +224,76 @@ async fn main() -> Result<()> {
our_id_short,
);
if cli.gui {
use tokio::sync::mpsc;
// Channel for GUI -> AppLogic commands
let (gui_cmd_tx, mut gui_cmd_rx) = mpsc::channel(100);
// Channel for AppLogic -> GUI state updates
let (gui_state_tx, gui_state_rx) = mpsc::channel(100);
// Get initial state
let initial_state = app_logic.get_frontend_state().await;
// Spawn AppLogic loop
tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_millis(100));
loop {
let mut state_changed = false;
tokio::select! {
_ = interval.tick() => {
app_logic.file_mgr.check_timeouts();
}
Some(cmd) = gui_cmd_rx.recv() => {
match app_logic.handle_command(cmd).await {
Ok(true) => { // Quit command
break;
}
Ok(false) => {
state_changed = true;
}
Err(e) => {
tracing::error!("Command error: {}", e);
}
}
}
Some(event) = net_rx.recv() => {
app_logic.handle_net_event(event).await;
state_changed = true;
}
Some(event) = gossip_event_rx.recv() => {
app_logic.handle_net_event(event).await;
state_changed = true;
}
_ = tokio::signal::ctrl_c() => {
break;
}
}
if state_changed {
let new_state = app_logic.get_frontend_state().await;
if gui_state_tx.send(new_state).await.is_err() {
break;
}
}
}
// Shutdown logic
let _ = app_logic.net.shutdown().await;
app_logic.media.shutdown();
});
// Run GUI
let flags = crate::gui::Flags {
initial_state,
command_sender: gui_cmd_tx,
state_receiver: gui_state_rx,
};
crate::gui::run(flags)?;
return Ok(());
}
// Setup terminal
enable_raw_mode()?;
let mut stdout = io::stdout();
@@ -287,7 +368,7 @@ async fn run_event_loop(
match maybe_event {
Some(Ok(Event::Key(key))) => {
let cmd = app.handle_key(key);
if logic.handle_tui_command(cmd).await? {
if logic.handle_command(cmd).await? {
return Ok(());
}
}


@@ -11,6 +11,11 @@ use tracing;
use crate::media::WebMediaEvent;
use crate::protocol::{decode_framed, write_framed, MediaKind, MediaStreamMessage};
use std::process::Stdio;
use tokio::io::{AsyncReadExt, BufReader, AsyncWriteExt, AsyncBufReadExt};
use tokio::process::Command;
use xcap::Monitor;
/// Manages a video capture session (camera or screen).
pub struct VideoCapture {
running: Arc<AtomicBool>,
@@ -18,35 +23,114 @@ pub struct VideoCapture {
}
impl VideoCapture {
/// Start video capture with web input (broadcast receiver).
pub async fn start_web(
kind: MediaKind,
_local_peer_id: iroh::EndpointId,
/// Start web-based screen share (relay from Web to Peers).
pub async fn start_web_screen(
peers: Vec<iroh::EndpointId>,
network_manager: crate::net::NetworkManager,
input_rx: tokio::sync::broadcast::Sender<Vec<u8>>,
source_tx: tokio::sync::broadcast::Sender<Vec<u8>>,
) -> Result<Self> {
let running = Arc::new(AtomicBool::new(true));
// Spawn sender tasks
let mut tasks = Vec::new();
// For each peer, spawn a sender task that subscribes to the source
for peer in peers {
let running = running.clone();
let net = network_manager.clone();
let rx = input_rx.subscribe();
let kind = kind.clone();
let rx = source_tx.subscribe();
let kind = MediaKind::Screen;
let task = tokio::spawn(async move {
if let Err(e) = run_video_sender_web(net, peer, kind, rx, running).await {
tracing::error!("Video sender web error: {}", e);
tasks.push(tokio::spawn(async move {
if let Err(e) = run_video_sender_native(net, peer, kind, rx, running).await {
tracing::error!("Video sender web screen error: {}", e);
}
});
tasks.push(task);
}));
}
Ok(Self {
running,
tasks, // Added tasks
tasks,
})
}
/// Start web-based camera share (relay from Web to Peers).
pub async fn start_web_camera(
peers: Vec<iroh::EndpointId>,
network_manager: crate::net::NetworkManager,
source_tx: tokio::sync::broadcast::Sender<Vec<u8>>,
) -> Result<Self> {
let running = Arc::new(AtomicBool::new(true));
let mut tasks = Vec::new();
// For each peer, spawn a sender task that subscribes to the source
for peer in peers {
let running = running.clone();
let net = network_manager.clone();
let rx = source_tx.subscribe();
let kind = MediaKind::Camera;
tasks.push(tokio::spawn(async move {
if let Err(e) = run_video_sender_native(net, peer, kind, rx, running).await {
tracing::error!("Video sender web camera error: {}", e);
}
}));
}
Ok(Self {
running,
tasks,
})
}
/// Start native video capture via FFmpeg.
pub async fn start_native(
kind: MediaKind,
_local_peer_id: iroh::EndpointId,
peers: Vec<iroh::EndpointId>,
network_manager: crate::net::NetworkManager,
broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
) -> Result<Self> {
let running = Arc::new(AtomicBool::new(true));
// Channel to distribute frames from FFmpeg to peer senders
let (frame_tx, _) = tokio::sync::broadcast::channel::<Vec<u8>>(100);
let mut tasks = Vec::new();
// 1. Spawn Capture task
let capture_running = running.clone();
let frame_tx_clone = frame_tx.clone();
let broadcast_tx_clone = broadcast_tx.clone();
let kind_clone = kind.clone();
tasks.push(tokio::spawn(async move {
let result = if matches!(kind_clone, MediaKind::Screen) {
run_xcap_capture(frame_tx_clone, broadcast_tx_clone, capture_running).await
} else {
run_ffmpeg_capture(kind_clone, frame_tx_clone, broadcast_tx_clone, capture_running).await
};
if let Err(e) = result {
tracing::error!("Capture error: {}", e);
}
}));
// 2. Spawn peer sender tasks (reuse run_video_sender_web logic but with internal channel)
for peer in peers {
let running = running.clone();
let net = network_manager.clone();
let rx = frame_tx.subscribe();
let kind = kind.clone();
tasks.push(tokio::spawn(async move {
if let Err(e) = run_video_sender_native(net, peer, kind, rx, running).await {
tracing::error!("Video sender native error: {}", e);
}
}));
}
Ok(Self {
running,
tasks,
})
}
@@ -58,19 +142,48 @@ impl VideoCapture {
}
}
/// Handle incoming video stream from a peer (Web Version).
/// Receives video frames (e.g. H.264/VP9 encoded inside protocol messages) and forwards to frontend.
pub async fn handle_incoming_video_web(
/// Handle incoming video stream from a peer (Native MPV Version).
/// Receives video frames (e.g. H.264/HEVC encoded inside protocol messages) and pipes them to MPV.
pub async fn handle_incoming_video_native(
from: iroh::EndpointId,
message: MediaStreamMessage,
mut recv: iroh::endpoint::RecvStream,
broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
_broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
) -> Result<()> {
let kind = match message {
MediaStreamMessage::VideoStart { kind, .. } => kind,
_ => anyhow::bail!("Expected VideoStart"),
};
tracing::info!("Starting {:?} stream handler for {}", kind, from);
tracing::info!("Starting {:?} stream handler for {} (Native MPV)", kind, from);
// Spawn mpv
let mut cmd = Command::new("mpv");
cmd.args(&[
"--no-terminal",
"--ontop",
"--profile=low-latency",
"--cache=no",
"--force-window",
"-", // Read from stdin
]);
cmd.stdin(Stdio::piped());
// Silence mpv's own stdout/stderr (they could be redirected to logs instead)
cmd.stdout(Stdio::null());
cmd.stderr(Stdio::null());
// Ensure process is killed when this task drops
cmd.kill_on_drop(true);
let mut child = match cmd.spawn() {
Ok(c) => c,
Err(e) => {
tracing::error!("Failed to spawn mpv: {}", e);
return Err(anyhow::anyhow!("Failed to spawn mpv: {}", e));
}
};
let mut stdin = child.stdin.take().expect("Failed to open mpv stdin");
use tokio::io::AsyncWriteExt;
loop {
let msg: MediaStreamMessage = match decode_framed(&mut recv).await {
@@ -80,13 +193,20 @@ impl VideoCapture {
match msg {
MediaStreamMessage::VideoFrame { data, .. } => {
// Broadcast to web
let short_id: String = format!("{}", from).chars().take(8).collect();
let _ = broadcast_tx.send(WebMediaEvent::Video {
peer_id: short_id,
kind: kind.clone(),
data,
});
// Write directly to mpv's stdin. A VideoFrame payload is
// [1 byte frame type (key/delta)][Annex B NAL unit including its start code]
// (see run_ffmpeg_capture, which pushes frame_type before the NAL bytes),
// so skip the protocol's leading type byte before piping.
if data.len() > 1 {
if let Err(e) = stdin.write_all(&data[1..]).await {
tracing::error!("Failed to write to mpv: {}", e);
break;
}
}
}
MediaStreamMessage::VideoStop { .. } => {
tracing::info!("Peer stopped video");
@@ -95,6 +215,8 @@ impl VideoCapture {
_ => {}
}
}
let _ = child.kill().await;
Ok(())
}
}
@@ -105,11 +227,191 @@ impl Drop for VideoCapture {
}
}
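The one-byte frame-type prefix appears on both ends of the wire: the capture tasks below prepend it, and handle_incoming_video_native above strips it before piping to mpv. A minimal sketch of that layout (helper names are illustrative, not part of the protocol module):

// Payload layout: [1 byte frame type: 0 = key, 1 = delta]
//                 [Annex B NAL unit, including its 00 00 00 01 / 00 00 01 start code]
fn tag_nal(is_key: bool, nal: &[u8]) -> Vec<u8> {
    let mut payload = Vec::with_capacity(1 + nal.len());
    payload.push(if is_key { 0u8 } else { 1u8 });
    payload.extend_from_slice(nal);
    payload
}

fn untag_nal(payload: &[u8]) -> Option<(bool, &[u8])> {
    let (&tag, nal) = payload.split_first()?;
    Some((tag == 0, nal)) // the NAL unit still carries its start code
}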
// ---------------------------------------------------------------------------
// XCAP Capture Logic
// ---------------------------------------------------------------------------
async fn run_xcap_capture(
frame_tx: tokio::sync::broadcast::Sender<Vec<u8>>,
broadcast_preview: tokio::sync::broadcast::Sender<WebMediaEvent>,
running: Arc<AtomicBool>,
) -> Result<()> {
// 1. Get monitors
let monitors = Monitor::all().map_err(|e| anyhow::anyhow!("Failed to list monitors: {}", e))?;
if monitors.is_empty() {
return Err(anyhow::anyhow!("No monitors found"));
}
// Select first monitor for now
let monitor = &monitors[0];
let width = monitor.width().map_err(|e| anyhow::anyhow!("Failed to get monitor width: {}", e))?;
let height = monitor.height().map_err(|e| anyhow::anyhow!("Failed to get monitor height: {}", e))?;
let name = monitor.name().unwrap_or_else(|_| "Unknown Monitor".to_string());
tracing::info!("Starting xcap capture on monitor: {} ({}x{})", name, width, height);
// 2. Spawn FFmpeg to encode raw frames
// We feed raw RGBA frames to stdin
let mut cmd = Command::new("ffmpeg");
cmd.kill_on_drop(true);
cmd.args(&[
"-f", "rawvideo",
"-pixel_format", "rgba",
"-video_size", &format!("{}x{}", width, height),
"-framerate", "30",
"-i", "-", // Read raw frames from stdin
]);
// Output args: encode to a raw HEVC (Annex B) stream
cmd.args(&[
"-vf", "scale=1280:720", // Force 720p resize
"-c:v", "hevc_nvenc", // Hardware HEVC encoder; no software fallback in this path (unlike run_ffmpeg_capture)
"-b:v", "1M", // Cap bitrate at 1 Mbps
"-g", "30", // Keyframe interval (GOP) 30
"-zerolatency", "1",
"-preset", "p4",
"-f", "hevc",
"-",
]);
cmd.stdin(Stdio::piped());
cmd.stdout(Stdio::piped());
// Log stderr
let stderr_file = std::fs::File::create("ffmpeg_xcap.log").unwrap();
cmd.stderr(Stdio::from(stderr_file));
let mut child = cmd.spawn()?;
let mut stdin = child.stdin.take().expect("Failed to open ffmpeg stdin");
let stdout = child.stdout.take().expect("Failed to open ffmpeg stdout");
// 3. Spawn thread/task to capture frames and write to FFmpeg stdin
// xcap capture is synchronous/blocking while ffmpeg's stdin is async,
// so run the capture on a dedicated thread.
let running_clone = running.clone();
let monitor_clone = monitor.clone(); // xcap::Monitor implements Clone
// We use a channel to send frames from blocking capture to async writer
let (img_tx, mut img_rx) = tokio::sync::mpsc::channel::<Vec<u8>>(2);
// Spawn blocking capture thread
std::thread::spawn(move || {
// Target 30fps
let frame_duration = std::time::Duration::from_millis(33);
while running_clone.load(Ordering::Relaxed) {
let start = std::time::Instant::now();
match monitor_clone.capture_image() {
Ok(image) => {
// image is RgbaImage (Vec<u8>)
// We need raw bytes
let bytes = image.into_raw();
if img_tx.blocking_send(bytes).is_err() {
break;
}
}
Err(e) => {
eprintln!("xcap capture failed: {}", e);
// Don't break immediately, maybe transient?
std::thread::sleep(std::time::Duration::from_millis(100));
}
}
// Sleep to maintain framerate
let elapsed = start.elapsed();
if elapsed < frame_duration {
std::thread::sleep(frame_duration - elapsed);
} else {
// Even if we are slow, sleep a tiny bit to yield CPU
std::thread::sleep(std::time::Duration::from_millis(1));
}
}
});
// 4. Async task to write frames to FFmpeg stdin
let stdin_task = tokio::spawn(async move {
while let Some(frame_data) = img_rx.recv().await {
if stdin.write_all(&frame_data).await.is_err() {
break;
}
}
});
// 5. Read FFmpeg stdout and distribute (Same logic as run_ffmpeg_capture)
let mut reader = BufReader::new(stdout);
let mut buffer = Vec::with_capacity(1024 * 1024);
let mut temp_buf = [0u8; 4096];
loop {
if !running.load(Ordering::Relaxed) {
break;
}
let n = match reader.read(&mut temp_buf).await {
Ok(0) => break, // EOF
Ok(n) => n,
Err(_) => break,
};
buffer.extend_from_slice(&temp_buf[0..n]);
// Find NAL units
while let Some(start_idx) = find_start_code(&buffer) {
let end_idx = if let Some(next_start) = find_start_code_from(&buffer, start_idx + 4) {
next_start
} else {
break; // Wait for more data
};
let nal_data = buffer.drain(start_idx..end_idx).collect::<Vec<u8>>();
// Check if it's 3-byte or 4-byte start code
let start_code_len = if nal_data[2] == 1 { 3 } else { 4 };
// Construct payload
let mut payload: Vec<u8> = Vec::with_capacity(1 + nal_data.len());
// Check NAL type (HEVC/H.265)
// NAL header is after start code.
// data[start_code_len] is the NAL header.
let nal_header_byte = nal_data[start_code_len];
// Type is bits 1-6 (0x7E) shifted right by 1.
let nal_type = (nal_header_byte & 0x7E) >> 1;
// HEVC Keyframes:
// 16-21: IRAP (BLA_W_LP, BLA_W_RADL, BLA_N_LP, IDR_W_RADL, IDR_N_LP, CRA_NUT)
// 32-34: VPS, SPS, PPS (Parameters - treat as critical/key)
let is_key = (nal_type >= 16 && nal_type <= 21) || (nal_type >= 32 && nal_type <= 34);
let frame_type = if is_key { 0u8 } else { 1u8 };
payload.push(frame_type);
payload.extend_from_slice(&nal_data);
let _ = frame_tx.send(payload.clone());
let _ = broadcast_preview.send(WebMediaEvent::Video {
peer_id: "local".to_string(),
kind: MediaKind::Screen,
data: payload,
});
}
}
let _ = child.kill().await;
stdin_task.abort();
Ok(())
}
// ---------------------------------------------------------------------------
// FFmpeg Capture Logic
// ---------------------------------------------------------------------------
async fn run_video_sender_web(
async fn run_video_sender_native(
network_manager: crate::net::NetworkManager,
peer: iroh::EndpointId,
kind: MediaKind,
@@ -119,13 +421,14 @@ async fn run_video_sender_web(
let (mut send, _) = network_manager
.open_media_stream(peer, kind.clone())
.await?;
// For web, we assume fixed resolution and fps for now.
// Send Start message
write_framed(
&mut send,
&MediaStreamMessage::VideoStart {
kind,
width: 640,
height: 480,
width: 1280, // Target 720p
height: 720,
fps: 30,
},
)
@@ -134,9 +437,10 @@ async fn run_video_sender_web(
while running.load(Ordering::Relaxed) {
match input_rx.recv().await {
Ok(data) => {
// Web sends WebP chunk (full frame)
// FFmpeg data is already [FrameType][VP8 Chunk], see run_ffmpeg_capture
// Just wrap in protocol message
let msg = MediaStreamMessage::VideoFrame {
sequence: 0, // Sequence not used for web input, set to 0
sequence: 0,
timestamp_ms: std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
@@ -147,18 +451,325 @@ async fn run_video_sender_web(
break;
}
}
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
tracing::warn!("Video sender lagged");
}
Err(_) => break,
}
}
let _ = write_framed(&mut send, &MediaStreamMessage::VideoStop { kind }).await;
send.finish()?;
Ok(())
}
// ---------------------------------------------------------------------------
// Player Logic (MPV/VLC)
// ---------------------------------------------------------------------------
async fn run_ffmpeg_capture(
kind: MediaKind,
frame_tx: tokio::sync::broadcast::Sender<Vec<u8>>,
broadcast_preview: tokio::sync::broadcast::Sender<WebMediaEvent>,
running: Arc<AtomicBool>,
) -> Result<()> {
let mut cmd = Command::new("ffmpeg");
cmd.kill_on_drop(true);
// Output args: Robust Encoder Selection
// Try: hevc_nvenc -> h264_nvenc -> libx264
struct EncoderConfig {
name: &'static str,
codec: &'static str,
opts: Vec<&'static str>,
format: &'static str, // "hevc" or "h264" (raw stream format)
filter: &'static str, // "hevc_mp4toannexb" or "h264_mp4toannexb"
pixel_format: Option<&'static str>, // Force pixel format if needed
}
let encoders = vec![
EncoderConfig {
name: "hevc_nvenc (Hardware)",
codec: "hevc_nvenc",
opts: vec!["-b:v", "1M", "-g", "30", "-zerolatency", "1", "-preset", "p4"],
format: "hevc",
filter: "hevc_mp4toannexb",
pixel_format: None, // NVENC usually handles formats well
},
EncoderConfig {
name: "h264_nvenc (Hardware Fallback)",
codec: "h264_nvenc",
opts: vec!["-b:v", "1.5M", "-g", "30", "-zerolatency", "1", "-preset", "p4"],
format: "h264",
filter: "h264_mp4toannexb",
pixel_format: None,
},
EncoderConfig {
name: "libx264 (Software Fallback)",
codec: "libx264",
opts: vec!["-preset", "ultrafast", "-tune", "zerolatency", "-b:v", "1M", "-g", "30"],
format: "h264",
filter: "h264_mp4toannexb",
pixel_format: Some("yuv420p"), // libx264 often needs yuv420p
},
];
let mut final_child = None;
let mut chosen_filter = "";
// We need to keep the stdout/stderr open
for enc in &encoders {
tracing::info!("Trying encoder: {}", enc.name);
let mut cmd = Command::new("ffmpeg");
cmd.kill_on_drop(true);
// Input args (re-applied for each attempt)
// TODO: Detect platform/device better. For now assuming Linux/V4L2.
match kind {
MediaKind::Camera => {
cmd.args(&[
"-f", "v4l2",
"-framerate", "30",
"-video_size", "1280x720",
"-i", "/dev/video0",
]);
}
MediaKind::Screen => {
// Always use x11grab (works on X11 and XWayland)
let display_env = std::env::var("DISPLAY").unwrap_or_else(|_| ":0.0".to_string());
tracing::info!("Using x11grab on display: {}", display_env);
cmd.args(&[
"-f", "x11grab",
"-framerate", "30",
"-video_size", "1920x1080", // Input size (assuming 1080p for now, but safer to autodect or be large)
"-i", &display_env,
"-vf", "scale=1280:720", // Force 720p resize
]);
}
_ => return Ok(()),
}
// Pixel format if needed
if let Some(pix_fmt) = enc.pixel_format {
cmd.args(&["-pix_fmt", pix_fmt]);
}
// Encoder args
cmd.arg("-c:v").arg(enc.codec);
cmd.args(&enc.opts);
// Bitstream filter to ensure Annex B (start codes)
cmd.arg("-bsf:v").arg(enc.filter);
// Output format
cmd.arg("-f").arg(enc.format);
cmd.arg("-");
cmd.stdout(Stdio::piped());
cmd.stderr(Stdio::piped());
match cmd.spawn() {
Ok(mut child) => {
// Wait a bit to see if it crashes immediately
// We sleep for 500ms to let ffmpeg initialize
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
if let Ok(Some(status)) = child.try_wait() {
tracing::warn!("Encoder {} failed immediately with status: {}", enc.name, status);
// Read stderr to see why
if let Some(mut stderr) = child.stderr.take() {
let mut err_buf = String::new();
let _ = stderr.read_to_string(&mut err_buf).await;
tracing::warn!("FFmpeg stderr: {}", err_buf);
}
continue; // Try next
}
// It seems to be running
tracing::info!("Selected encoder: {}", enc.name);
tracing::info!("Capture loop started");
// Redirect stderr to log file or tracing
if let Some(stderr) = child.stderr.take() {
// Spawn a task to log stderr line by line
let running_log = running.clone();
tokio::spawn(async move {
let mut reader = BufReader::new(stderr);
let mut line = String::new();
while running_log.load(Ordering::Relaxed) {
match reader.read_line(&mut line).await {
Ok(0) => break, // EOF
Ok(_) => {
// Log errors or critical warnings
if line.contains("Error") || line.contains("error") || line.contains("fail") {
tracing::error!("FFmpeg: {}", line.trim());
}
line.clear();
}
Err(_) => break,
}
}
});
}
final_child = Some(child);
chosen_filter = enc.filter;
break;
}
Err(e) => {
tracing::warn!("Failed to spawn encoder {}: {}", enc.name, e);
continue;
}
}
}
let mut child = match final_child {
Some(c) => c,
None => {
tracing::error!("All encoders failed. Cannot start capture.");
return Err(anyhow::anyhow!("All encoders failed"));
}
};
let stdout = child.stdout.take().expect("Failed to open stdout");
// BufReader is not strictly needed for raw block reads, but it is harmless here.
let mut reader = BufReader::new(stdout);
// Raw H.264 parsing (Annex B)
// Stream is a sequence of NAL units, each starting with 00 00 00 01 (or 00 00 01)
// We need to buffer and split.
let mut buffer = Vec::with_capacity(1024 * 1024); // 1MB buffer
let mut temp_buf = [0u8; 4096];
while running.load(Ordering::Relaxed) {
let n = match reader.read(&mut temp_buf).await {
Ok(0) => break, // EOF
Ok(n) => n,
Err(_) => break,
};
buffer.extend_from_slice(&temp_buf[0..n]);
// Scan the buffer for Annex B start codes and split it into NAL units.
while let Some(start_idx) = find_start_code(&buffer) {
// A NAL unit ends where the next start code begins, so we need a second
// start code in the buffer before this one can be extracted.
let end_idx = if let Some(next_start) = find_start_code_from(&buffer, start_idx + 4) {
next_start
} else {
// No next start code yet; wait for more data, since NAL boundaries must be exact.
break;
};
// Extract the NAL unit including its start code: the WebCodecs preview path
// (EncodedVideoChunk with 'avc1' data) expects Annex B NAL units with the
// 00 00 00 01 prefix intact.
let nal_data = buffer.drain(start_idx..end_idx).collect::<Vec<u8>>();
// The NAL header is the first byte after the start code, which may be 4 bytes
// (00 00 00 01) or 3 bytes (00 00 01); ffmpeg's raw output usually uses the
// 4-byte form, but handle both.
let start_code_len = if nal_data[2] == 1 { 3 } else { 4 };
// Construct payload
let mut payload: Vec<u8> = Vec::with_capacity(1 + nal_data.len());
// Check NAL type
// nal_data[start_code_len] is the NAL header (first byte).
let nal_header_byte = nal_data[start_code_len];
let is_key = if chosen_filter.contains("hevc") {
// HEVC (H.265)
// Type is bits 1-6 (0x7E) shifted right by 1.
let nal_type = (nal_header_byte & 0x7E) >> 1;
// HEVC Keyframes:
// 16-21: IRAP (BLA_W_LP, BLA_W_RADL, BLA_N_LP, IDR_W_RADL, IDR_N_LP, CRA_NUT)
// 32-34: VPS, SPS, PPS (Parameters - treat as critical/key)
(nal_type >= 16 && nal_type <= 21) || (nal_type >= 32 && nal_type <= 34)
} else {
// H.264 (AVC)
// Type is lower 5 bits (0x1F)
let nal_type = nal_header_byte & 0x1F;
// H.264 Keyframes:
// 5: IDR (Instantaneous Decoding Refresh) - Keyframe
// 7: SPS (Sequence Parameter Set)
// 8: PPS (Picture Parameter Set)
nal_type == 5 || nal_type == 7 || nal_type == 8
};
let frame_type = if is_key { 0u8 } else { 1u8 };
payload.push(frame_type);
payload.extend_from_slice(&nal_data);
// Send to peers
let _ = frame_tx.send(payload.clone());
// Send to local web preview
let _ = broadcast_preview.send(WebMediaEvent::Video {
peer_id: "local".to_string(),
kind: kind.clone(),
data: payload,
});
// drain() removed everything up to end_idx, so the remaining bytes shift to
// index 0 and the next loop iteration rescans them from the start.
}
}
let _ = child.kill().await;
Ok(())
}
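A worked example of the keyframe checks above, using hand-picked NAL header bytes (illustrative values, not captured from a real stream):

#[test]
fn keyframe_detection_examples() {
    // H.264: header byte 0x65 -> nal_type = 0x65 & 0x1F = 5 (IDR), a keyframe.
    assert_eq!(0x65u8 & 0x1F, 5);
    // H.264: header byte 0x41 -> nal_type = 1 (non-IDR slice), a delta frame.
    assert_eq!(0x41u8 & 0x1F, 1);
    // HEVC: header byte 0x26 -> nal_type = (0x26 & 0x7E) >> 1 = 19 (IDR_W_RADL), a keyframe.
    assert_eq!((0x26u8 & 0x7E) >> 1, 19);
}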
fn find_start_code(data: &[u8]) -> Option<usize> {
find_start_code_from(data, 0)
}
fn find_start_code_from(data: &[u8], start: usize) -> Option<usize> {
if data.len() < 3 { return None; }
for i in start..data.len() - 2 {
// Look for 00 00 01
if data[i] == 0 && data[i+1] == 0 && data[i+2] == 1 {
// A 4-byte start code (00 00 00 01) contains 00 00 01 starting at offset 1,
// so if the byte before the match is also zero, the start code actually
// begins one byte earlier; report that position instead.
if i > start && data[i-1] == 0 {
return Some(i-1);
}
return Some(i);
}
}
None
}
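A quick sanity check of how the two helpers behave on hand-built buffers, assuming both functions above are in scope:

#[test]
fn start_code_scanning_examples() {
    // 4-byte start code: the scan matches 00 00 01 at index 1, sees the zero
    // before it, and backs up to index 0.
    assert_eq!(find_start_code(&[0u8, 0, 0, 1, 0x40]), Some(0));
    // A bare 3-byte start code is reported where it stands.
    assert_eq!(find_start_code(&[0u8, 0, 1, 0x40]), Some(0));
    // find_start_code_from skips past the first NAL to locate the next start code.
    let two_nals: [u8; 9] = [0, 0, 0, 1, 0x40, 0, 0, 1, 0x41];
    assert_eq!(find_start_code_from(&two_nals, 4), Some(5));
}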


@@ -17,6 +17,8 @@ use crate::protocol::{decode_framed, MediaKind, MediaStreamMessage};
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Arc;
use cpal::traits::{DeviceTrait, HostTrait};
use self::capture::VideoCapture;
use self::voice::VoiceChat;
@@ -38,8 +40,6 @@ pub enum WebMediaEvent {
pub struct MediaState {
/// Active voice chat session (if any).
voice: Option<VoiceChat>,
/// Active camera capture (if any).
camera: Option<VideoCapture>,
/// Active screen capture (if any).
screen: Option<VideoCapture>,
/// Playback task handles for incoming streams (voice/video).
@@ -49,27 +49,31 @@ pub struct MediaState {
pub broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
// Input channels (from Web -> MediaState -> Peers)
pub mic_broadcast: tokio::sync::broadcast::Sender<Vec<f32>>,
pub cam_broadcast: tokio::sync::broadcast::Sender<Vec<u8>>,
pub screen_broadcast: tokio::sync::broadcast::Sender<Vec<u8>>,
pub mic_bitrate: Arc<AtomicU32>,
pub input_device: Option<String>,
pub output_device: Option<String>,
pub initial_master_volume: f32,
pub initial_noise_suppression: bool,
}
impl MediaState {
pub fn new(mic_bitrate: u32) -> Self {
pub fn new(mic_bitrate: u32, input_device: Option<String>, output_device: Option<String>, master_volume: f32, noise_suppression: bool) -> Self {
let (broadcast_tx, _) = tokio::sync::broadcast::channel(100);
let (mic_broadcast, _) = tokio::sync::broadcast::channel(100);
let (cam_broadcast, _) = tokio::sync::broadcast::channel(100);
let (screen_broadcast, _) = tokio::sync::broadcast::channel(100);
Self {
voice: None,
camera: None,
screen: None,
incoming_media: Vec::new(),
broadcast_tx,
mic_broadcast,
cam_broadcast,
screen_broadcast,
mic_bitrate: Arc::new(AtomicU32::new(mic_bitrate)),
input_device,
output_device,
initial_master_volume: master_volume,
initial_noise_suppression: noise_suppression,
}
}
@@ -77,16 +81,150 @@ impl MediaState {
self.mic_bitrate.store(bitrate, Ordering::Relaxed);
}
pub fn set_input_device(&mut self, device_name: String) {
self.input_device = Some(device_name);
}
pub fn set_output_device(&mut self, device_name: String) {
self.output_device = Some(device_name);
}
pub fn get_input_devices(&self) -> Vec<String> {
let mut names = Vec::new();
// Prioritize JACK if available, otherwise ALSA/Pulse/WASAPI
let available_hosts = cpal::available_hosts();
let mut hosts = Vec::new();
// Push JACK first if available
if available_hosts.contains(&cpal::HostId::Jack) {
hosts.push(cpal::host_from_id(cpal::HostId::Jack).unwrap());
}
// Then default host
hosts.push(cpal::default_host());
for host in hosts {
if let Ok(devices) = host.input_devices() {
for device in devices {
if let Ok(name) = device.name() {
// Filter out common noise/unusable devices
if name.contains("dmix") || name.contains("dsnoop") || name.contains("null") {
continue;
}
// Clean up ALSA names
// Example: "sysdefault:CARD=PCH" -> "PCH (sysdefault)"
// Example: "front:CARD=Microphone,DEV=0" -> "Microphone (front)"
let clean_name = if let Some(start) = name.find("CARD=") {
let rest = &name[start + 5..];
let card_name = rest.split(',').next().unwrap_or(rest);
let prefix = name.split(':').next().unwrap_or("Unknown");
format!("{} ({})", card_name, prefix)
} else if name.contains("HDA Intel PCH") {
// Simplify generic Intel names if possible
name
} else {
name
};
names.push(clean_name);
}
}
}
}
// Dedup and sort
names.sort();
names.dedup();
names
}
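The CARD= cleanup above turns raw ALSA identifiers into friendlier labels. A small sketch with the transform pulled into a hypothetical standalone helper:

#[test]
fn alsa_name_cleaning_examples() {
    // Mirrors the inline logic in get_input_devices/get_output_devices.
    fn clean(name: &str) -> String {
        if let Some(start) = name.find("CARD=") {
            let rest = &name[start + 5..];
            let card_name = rest.split(',').next().unwrap_or(rest);
            let prefix = name.split(':').next().unwrap_or("Unknown");
            format!("{} ({})", card_name, prefix)
        } else {
            name.to_string()
        }
    }
    assert_eq!(clean("sysdefault:CARD=PCH"), "PCH (sysdefault)");
    assert_eq!(clean("front:CARD=Microphone,DEV=0"), "Microphone (front)");
}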
pub fn get_output_devices(&self) -> Vec<String> {
let mut names = Vec::new();
// Prioritize JACK if available
let available_hosts = cpal::available_hosts();
let mut hosts = Vec::new();
if available_hosts.contains(&cpal::HostId::Jack) {
hosts.push(cpal::host_from_id(cpal::HostId::Jack).unwrap());
}
hosts.push(cpal::default_host());
for host in hosts {
if let Ok(devices) = host.output_devices() {
for device in devices {
if let Ok(name) = device.name() {
if name.contains("dmix") || name.contains("dsnoop") || name.contains("null") {
continue;
}
let clean_name = if let Some(start) = name.find("CARD=") {
let rest = &name[start + 5..];
let card_name = rest.split(',').next().unwrap_or(rest);
let prefix = name.split(':').next().unwrap_or("Unknown");
format!("{} ({})", card_name, prefix)
} else {
name
};
names.push(clean_name);
}
}
}
}
names.sort();
names.dedup();
names
}
// -----------------------------------------------------------------------
// Public state queries
// -----------------------------------------------------------------------
pub fn voice_enabled(&self) -> bool {
self.voice.is_some()
pub fn set_volume(&self, volume: f32) {
if let Some(voice) = &self.voice {
voice.set_volume(volume);
}
}
pub fn camera_enabled(&self) -> bool {
self.camera.is_some()
pub fn get_volume(&self) -> f32 {
if let Some(voice) = &self.voice {
voice.get_volume()
} else {
self.initial_master_volume
}
}
pub fn is_denoise_enabled(&self) -> bool {
if let Some(voice) = &self.voice {
voice.is_denoise_enabled()
} else {
self.initial_noise_suppression
}
}
pub fn toggle_denoise(&self) -> Option<bool> {
if let Some(voice) = &self.voice {
Some(voice.toggle_denoise())
} else {
None
}
}
pub fn get_peer_levels(&self) -> std::collections::HashMap<EndpointId, f32> {
if let Some(voice) = &self.voice {
voice.get_peer_levels()
} else {
std::collections::HashMap::new()
}
}
pub fn voice_enabled(&self) -> bool {
self.voice.is_some()
}
pub fn screen_enabled(&self) -> bool {
@@ -107,61 +245,32 @@ impl MediaState {
"🎤 Voice chat stopped"
} else {
// Start — open media streams to all peers
// For web capture, we don't open streams here. start_web does it.
let peers = net.peers.lock().await;
match VoiceChat::start_web(
// Use Native Capture
match VoiceChat::start_native(
net.clone(),
peers.keys().cloned().collect(),
self.mic_broadcast.clone(),
self.mic_broadcast.subscribe(), // Subscribe to get new receiver!
self.broadcast_tx.clone(),
self.mic_bitrate.clone(),
self.input_device.clone(),
self.output_device.clone(), // Added output device
self.initial_master_volume,
self.initial_noise_suppression,
) {
Ok(vc) => {
self.voice = Some(vc);
"🎤 Voice chat started (Web)"
"🎤 Voice chat started (Native)"
}
Err(e) => {
tracing::error!("Failed to start voice chat: {}", e);
tracing::error!("Failed to start native voice chat: {}", e);
"🎤 Failed to start voice chat"
}
}
}
}
/// Toggle camera capture.
pub async fn toggle_camera(&mut self, net: NetworkManager) -> &'static str {
// We use ffmpeg now, which doesn't strictly depend on pipewire crate,
// but likely requires pipewire daemon or v4l2.
// We kept pipewire check for consistency but it might be loose.
if self.camera.is_some() {
if let Some(mut c) = self.camera.take() {
c.stop();
}
"📷 Camera stopped"
} else {
// Start
let peers = net.peers.lock().await;
match VideoCapture::start_web(
MediaKind::Camera,
net.our_id,
peers.keys().cloned().collect(),
net.clone(),
self.cam_broadcast.clone(),
)
.await
{
Ok(vc) => {
self.camera = Some(vc);
"📷 Camera started (Web)"
}
Err(e) => {
tracing::error!("Failed to start camera: {}", e);
"📷 Failed to start camera"
}
}
}
}
/// Toggle screen sharing.
pub async fn toggle_screen(&mut self, net: NetworkManager) -> &'static str {
if self.screen.is_some() {
@@ -172,22 +281,24 @@ impl MediaState {
} else {
// Start
let peers = net.peers.lock().await;
match VideoCapture::start_web(
// Use Native Capture (FFmpeg)
match VideoCapture::start_native(
MediaKind::Screen,
net.our_id,
peers.keys().cloned().collect(),
net.clone(),
self.screen_broadcast.clone(),
self.broadcast_tx.clone(),
)
.await
{
Ok(vc) => {
self.screen = Some(vc);
"🖥️ Screen share started (Web)"
"🖥️ Screen share started (Native FFmpeg)"
}
Err(e) => {
tracing::error!("Failed to start screen share: {}", e);
"🖥️ Failed to start screen share"
"🖥️ Failed to start screen share (Install FFmpeg)"
}
}
}
@@ -201,36 +312,30 @@ impl MediaState {
pub fn handle_incoming_media(
&mut self,
from: EndpointId,
kind: MediaKind,
_kind: MediaKind,
_send: iroh::endpoint::SendStream,
mut recv: iroh::endpoint::RecvStream,
) {
let broadcast_tx = self.broadcast_tx.clone();
// Spawn a task to determine stream type and handle it
let handle = tokio::spawn(async move {
// Read first message to determine type.
// Note: We already know the kind from ALPN, but we still decode the start message.
match decode_framed::<MediaStreamMessage>(&mut recv).await {
Ok(msg) => match msg {
MediaStreamMessage::AudioStart { .. } => {
if kind != MediaKind::Voice {
tracing::warn!("ALPN mismatch: expected Voice, got AudioStart");
}
tracing::info!("Accepted Audio stream from {:?}", from);
if let Err(e) =
VoiceChat::handle_incoming_audio_web(from, msg, recv, broadcast_tx)
.await
{
tracing::error!("Audio web playback error: {}", e);
}
// DEPRECATED in Native Datagram mode
tracing::warn!("Received Audio stream from {} (unexpected in datagram mode)", from);
// Stream-based audio fallback is not implemented; the stream is simply
// dropped when this task returns.
}
MediaStreamMessage::VideoStart { .. } => {
tracing::info!("Accepted Video stream from {:?}", from);
if let Err(e) =
VideoCapture::handle_incoming_video_web(from, msg, recv, broadcast_tx)
VideoCapture::handle_incoming_video_native(from, msg, recv, broadcast_tx)
.await
{
tracing::error!("Video web playback error: {}", e);
tracing::error!("Video native playback error: {}", e);
}
}
_ => {
@@ -251,26 +356,25 @@ impl MediaState {
}
});
// Store handle to allow cleanup on shutdown
// We clean up finished tasks periodically or on shutdown
self.incoming_media.push(handle);
// Clean up finished tasks
self.incoming_media.retain(|h| !h.is_finished());
}
/// Handle an incoming datagram (unreliable audio/video).
pub fn handle_incoming_datagram(&mut self, from: EndpointId, data: bytes::Bytes) {
// Old approach: assume every datagram is voice and postcard-decode it as a
// MediaStreamMessage, deserializing AudioData directly.
if let Some(voice) = &mut self.voice {
voice.handle_datagram(from, data, self.broadcast_tx.clone());
if data.is_empty() { return; }
// Check first byte for type
match data[0] {
1 => { // Audio
if let Some(voice) = &mut self.voice {
voice.handle_datagram(from, data);
}
},
// 2 => Video?
_ => {
// tracing::trace!("Unknown datagram type: {}", data[0]);
}
}
}
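// The datagram layout used above is one leading type byte followed by the
// codec payload. A minimal sketch of both directions (PACKET_TYPE_AUDIO = 1
// is assumed to match the constant in the voice module):
fn frame_datagram(packet_type: u8, payload: &[u8]) -> bytes::Bytes {
    let mut buf = Vec::with_capacity(1 + payload.len());
    buf.push(packet_type);
    buf.extend_from_slice(payload);
    bytes::Bytes::from(buf)
}
fn parse_datagram(data: &[u8]) -> Option<(u8, &[u8])> {
    let (&packet_type, payload) = data.split_first()?;
    Some((packet_type, payload))
}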
@@ -285,17 +389,12 @@ impl MediaState {
} else {
"🎤 off"
};
let cam = if self.camera_enabled() {
"📷 LIVE"
} else {
"📷 off"
};
let scr = if self.screen_enabled() {
"🖥 LIVE"
} else {
"🖥 off"
};
format!("{} {} {}", mic, cam, scr)
format!("{} {} {}", mic, "I", scr)
}
/// Shut down all active media.
@@ -303,9 +402,6 @@ impl MediaState {
if let Some(mut v) = self.voice.take() {
v.stop();
}
if let Some(mut c) = self.camera.take() {
c.stop();
}
if let Some(mut s) = self.screen.take() {
s.stop();
}
@@ -320,7 +416,3 @@ impl Drop for MediaState {
self.shutdown();
}
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

View File

@@ -1,80 +1,184 @@
//! Voice capture and playback using PipeWire + Audiopus (via Songbird dependency).
//! Voice capture and playback using cpal + Audiopus (Opus).
//!
//! Architecture:
//! - Capture runs on a dedicated OS thread (PipeWire main loop).
//! - PipeWire process callback copies PCM → crossbeam channel.
//! - Async task reads from channel, encodes with Opus, sends over QUIC.
//! - Playback: receives Opus packets from QUIC, decodes, feeds to PipeWire output.
//! Native implementation using QUIC Datagrams.
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::media::WebMediaEvent;
use crate::protocol::{decode_framed, MediaStreamMessage};
use anyhow::Result;
use postcard;
// Use audiopus types directly
use anyhow::{Result, anyhow};
use dashmap::DashMap;
use nnnoiseless::DenoiseState;
use audiopus::{
coder::Decoder as OpusDecoder, coder::Encoder as OpusEncoder, Application, Bitrate, Channels,
SampleRate,
};
use bytes::Bytes;
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use crossbeam_channel::{unbounded, Receiver, Sender};
use iroh::EndpointId;
use ringbuf::{traits::*, HeapRb};
use crate::media::WebMediaEvent;
const PACKET_TYPE_AUDIO: u8 = 1;
const FRAME_SIZE_SAMPLES: usize = 960; // 20ms at 48kHz
// Wrapper to make OpusDecoder Send + Sync
struct SendDecoder(OpusDecoder);
unsafe impl Send for SendDecoder {}
unsafe impl Sync for SendDecoder {}
// Wrapper to make AudioProducer Sync
struct SyncAudioProducer(ringbuf::HeapProd<f32>);
unsafe impl Sync for SyncAudioProducer {}
unsafe impl Send for SyncAudioProducer {} // HeapProd is already Send; restated for clarity
impl std::ops::Deref for SyncAudioProducer {
type Target = ringbuf::HeapProd<f32>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl std::ops::DerefMut for SyncAudioProducer {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
// Types for RingBuf
type AudioProducer = SyncAudioProducer;
type AudioConsumer = ringbuf::HeapCons<f32>;
// Constants
const SAMPLE_RATE_VAL: i32 = 48000;
const FRAME_SIZE_MS: u32 = 20; // 20ms
const FRAME_SIZE_SAMPLES: usize = (SAMPLE_RATE_VAL as usize * FRAME_SIZE_MS as usize) / 1000;
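// Sanity check on the constants above: 20 ms at 48 kHz is 960 samples, one
// of Opus's supported frame sizes (2.5/5/10/20/40/60 ms).
const _: () = assert!(FRAME_SIZE_SAMPLES == 960);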
/// Main voice chat coordination.
pub struct VoiceChat {
running: Arc<AtomicBool>,
capture_thread: Option<thread::JoinHandle<()>>,
tasks: Vec<tokio::task::JoinHandle<()>>,
datagram_decoders: std::collections::HashMap<iroh::EndpointId, OpusDecoder>,
// Capture and Playback threads
capture_thread: Option<thread::JoinHandle<()>>,
playback_thread: Option<thread::JoinHandle<()>>,
// Per-peer state: Decoder + Jitter Buffer Producer
peer_audio_sinks: HashMap<EndpointId, (SendDecoder, AudioProducer)>,
// Channel to notify playback thread of new peers
new_peer_tx: Sender<(EndpointId, AudioConsumer)>,
// Audio processing controls
pub denoise_enabled: Arc<AtomicBool>,
pub output_volume: Arc<AtomicU32>, // stored as f32 bits
pub peer_levels: Arc<DashMap<EndpointId, f32>>,
}
impl VoiceChat {
/// Start voice chat session (Web Version).
/// Uses browser for capture/playback handling implicitly via `MediaState` channels,
/// but here we handle the NETWORK side encoding/decoding.
pub fn start_web(
/// Start voice chat session (Native Version with CPAL + QUIC Datagrams).
pub fn start_native(
net: crate::net::NetworkManager,
peers: Vec<iroh::EndpointId>, // Multiple peers
mic_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
peers: Vec<EndpointId>,
mic_tx: tokio::sync::broadcast::Sender<Vec<f32>>,
_mic_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
_broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
mic_bitrate: Arc<AtomicU32>,
input_device_name: Option<String>,
output_device_name: Option<String>,
initial_volume: f32,
initial_denoise: bool,
) -> Result<Self> {
let running = Arc::new(AtomicBool::new(true));
let denoise_enabled = Arc::new(AtomicBool::new(initial_denoise));
let output_volume = Arc::new(AtomicU32::new(initial_volume.to_bits()));
let peer_levels = Arc::new(DashMap::new());
tracing::info!("Starting Native Voice Chat...");
// 1. Setup Playback Thread (CPAL Output)
let (new_peer_tx, new_peer_rx) = unbounded::<(EndpointId, AudioConsumer)>();
let playback_running = running.clone();
let playback_device_name = output_device_name.clone();
let playback_volume = output_volume.clone();
let playback_levels = peer_levels.clone();
let playback_thread = thread::spawn(move || {
run_playback_loop(playback_running, new_peer_rx, playback_device_name, playback_volume, playback_levels);
});
// 2. Setup Capture Thread (CPAL Input)
let capture_running = running.clone();
let mic_tx_capture = mic_tx.clone();
let capture_device_name = input_device_name.clone();
let capture_denoise = denoise_enabled.clone();
let capture_thread = thread::spawn(move || {
run_capture_loop(capture_running, mic_tx_capture, capture_device_name, capture_denoise);
});
// 3. Setup Network Sender Task (Opus -> Datagrams)
let mut tasks = Vec::new();
// Spawn a single task to encode once and send to all peers.
let sender_running = running.clone();
let net_clone = net.clone();
let mic_bitrate_clone = mic_bitrate.clone();
let mic_rx_sender = mic_tx.subscribe(); // Subscribe to capture
let sender_task = tokio::spawn(async move {
if let Err(e) = run_opis_sender_web_multi(
run_network_sender(
net_clone,
peers,
mic_rx,
mic_rx_sender,
sender_running,
mic_bitrate_clone,
)
.await
{
tracing::error!("Voice sender failed: {}", e);
}
).await;
});
tasks.push(sender_task);
Ok(Self {
running,
capture_thread: None,
tasks,
datagram_decoders: std::collections::HashMap::new(),
capture_thread: Some(capture_thread),
playback_thread: Some(playback_thread),
peer_audio_sinks: HashMap::new(),
new_peer_tx,
denoise_enabled,
output_volume,
peer_levels,
})
}
pub fn set_volume(&self, volume: f32) {
self.output_volume.store(volume.to_bits(), Ordering::Relaxed);
}
pub fn get_volume(&self) -> f32 {
f32::from_bits(self.output_volume.load(Ordering::Relaxed))
}
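// Why output_volume is an AtomicU32: std offers no AtomicF32, so the float
// travels as its raw bit pattern. A minimal round-trip sketch:
fn atomic_f32_roundtrip_demo() {
    let vol = AtomicU32::new(1.0f32.to_bits());
    vol.store(0.5f32.to_bits(), Ordering::Relaxed);
    assert_eq!(f32::from_bits(vol.load(Ordering::Relaxed)), 0.5);
}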
pub fn is_denoise_enabled(&self) -> bool {
self.denoise_enabled.load(Ordering::Relaxed)
}
pub fn toggle_denoise(&self) -> bool {
let current = self.denoise_enabled.load(Ordering::Relaxed);
self.denoise_enabled.store(!current, Ordering::Relaxed);
!current
}
pub fn get_peer_levels(&self) -> HashMap<EndpointId, f32> {
self.peer_levels.iter().map(|entry| (*entry.key(), *entry.value())).collect()
}
// Kept for compatibility but unused in Native mode
pub fn start_web(
_net: crate::net::NetworkManager,
_peers: Vec<EndpointId>,
_mic_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
_broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
_mic_bitrate: Arc<AtomicU32>,
) -> Result<Self> {
Err(anyhow!("Web voice not supported in this native build"))
}
/// Stop voice chat.
pub fn stop(&mut self) {
@@ -83,211 +187,362 @@ impl VoiceChat {
task.abort();
}
self.tasks.clear();
if let Some(t) = self.capture_thread.take() {
t.thread().unpark(); // Wake up if sleeping
let _ = t.join();
t.thread().unpark();
}
if let Some(t) = self.playback_thread.take() {
t.thread().unpark();
}
}
/// Handle incoming audio stream (Web Version).
pub async fn handle_incoming_audio_web(
from: iroh::EndpointId,
message: MediaStreamMessage,
mut recv: iroh::endpoint::RecvStream,
broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
) -> Result<()> {
// Initialize Opus decoder
let mut decoder = OpusDecoder::new(SampleRate::Hz48000, Channels::Mono)
.map_err(|e| anyhow::anyhow!("Failed to create Opus decoder: {:?}", e))?;
// Process start message
match message {
MediaStreamMessage::AudioStart { .. } => {
tracing::info!("Incoming voice stream started (web) from {}", from);
}
_ => anyhow::bail!("Expected AudioStart"),
/// Handle incoming datagram from Network.
pub fn handle_datagram(&mut self, from: EndpointId, data: Bytes) {
// Packet format: [1 byte TYPE][Opus Data]
if data.len() < 2 {
return;
}
let mut decode_buf = vec![0f32; FRAME_SIZE_SAMPLES];
loop {
let msg: MediaStreamMessage = match decode_framed(&mut recv).await {
Ok(m) => m,
Err(_) => break, // EOF
};
match msg {
MediaStreamMessage::AudioData { opus_data, .. } => {
// Removed `channels` field usage if it existed
match decoder.decode_float(Some(&opus_data), &mut decode_buf, false) {
Ok(len) => {
let samples = decode_buf[..len].to_vec();
// Broadcast to web
let short_id: String = format!("{}", from).chars().take(8).collect();
let _ = broadcast_tx.send(WebMediaEvent::Audio {
peer_id: short_id,
data: samples,
});
}
Err(e) => {
tracing::warn!("Opus decode error: {:?}", e);
}
}
}
MediaStreamMessage::AudioStop => {
tracing::info!("Peer stopped audio");
break;
}
_ => {}
}
// Check type (Audio = 1)
if data[0] != PACKET_TYPE_AUDIO {
return;
}
Ok(())
}
pub fn handle_datagram(
&mut self,
from: iroh::EndpointId,
data: bytes::Bytes,
broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
) {
// tracing::info!("Received datagram from {} ({} bytes)", from, data.len());
match postcard::from_bytes::<MediaStreamMessage>(&data) {
Ok(MediaStreamMessage::AudioData {
opus_data,
sequence,
}) => {
if sequence % 50 == 0 {
tracing::info!("Received AudioData seq {} from {}", sequence, from);
}
let decoder = self.datagram_decoders.entry(from).or_insert_with(|| {
tracing::info!("Creating new OpusDecoder for {}", from);
OpusDecoder::new(SampleRate::Hz48000, Channels::Mono)
.expect("Failed to create decoder")
});
// Max frame size is ~120ms (5760 samples). Use safe buffer.
let mut pcm = vec![0.0f32; 5760];
match decoder.decode_float(Some(&opus_data), &mut pcm, false) {
Ok(len) => {
let samples = pcm[..len].to_vec();
let short_id: String = format!("{}", from).chars().take(8).collect();
let _ = broadcast_tx.send(WebMediaEvent::Audio {
peer_id: short_id,
data: samples,
});
}
Err(e) => tracing::warn!("Opus decode error: {:?}", e),
}
let opus_data = &data[1..];
// Get or create decoder/producer for this peer
let (decoder, producer) = self.peer_audio_sinks.entry(from).or_insert_with(|| {
tracing::info!("New voice peer detected: {}", from);
// Create Jitter Buffer (RingBuf)
// 48kHz * 1s buffer
let rb = HeapRb::<f32>::new(48000);
let (prod, cons) = rb.split();
// Send consumer to playback thread
if let Err(e) = self.new_peer_tx.send((from, cons)) {
tracing::error!("Failed to send new peer to playback thread: {}", e);
}
let decoder = OpusDecoder::new(SampleRate::Hz48000, Channels::Mono)
.expect("Failed to create Opus decoder");
(SendDecoder(decoder), SyncAudioProducer(prod))
});
// Decode Opus -> PCM
// Max frame size for 120ms is 5760 samples.
let mut pcm = vec![0.0f32; 5760];
match decoder.0.decode_float(Some(opus_data), &mut pcm, false) {
Ok(len) => {
let samples = &pcm[..len];
producer.push_slice(samples);
}
Err(e) => {
tracing::warn!("Opus decode error: {:?}", e);
}
Ok(_) => {} // Ignore non-audio datagrams
Err(e) => tracing::warn!("Failed to deserialize datagram: {}", e),
}
}
}
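// A minimal sketch of the per-peer jitter buffer wiring above: the decode
// path pushes PCM into a HeapRb producer while the cpal callback drains the
// matching consumer (same ringbuf 0.4 API as used in this file):
fn jitter_buffer_demo() {
    let rb = HeapRb::<f32>::new(48_000); // ~1 s of mono 48 kHz audio
    let (mut prod, mut cons) = rb.split();
    prod.push_slice(&[0.1, 0.2, 0.3]); // network/decoder side
    while let Some(sample) = cons.try_pop() {
        let _ = sample; // playback side mixes this into the output buffer
    }
}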
// ---------------------------------------------------------------------------
// Opus sender — encodes PCM and sends over QUIC (Multi-Peer)
// Loops
// ---------------------------------------------------------------------------
async fn run_opis_sender_web_multi(
network_manager: crate::net::NetworkManager,
peers: Vec<iroh::EndpointId>,
mut input_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
fn run_capture_loop(
running: Arc<AtomicBool>,
mic_tx: tokio::sync::broadcast::Sender<Vec<f32>>,
device_name: Option<String>,
denoise_enabled: Arc<AtomicBool>,
) {
let host = cpal::default_host();
// Find device
let device = if let Some(ref name) = device_name {
host.input_devices().ok().and_then(|mut ds| ds.find(|d| d.name().map(|n| n == *name).unwrap_or(false)))
} else {
host.default_input_device()
};
let device = match device {
Some(d) => d,
None => {
tracing::error!("No input device found");
return;
}
};
tracing::info!("Mic opened: {:?}", device.name());
// Configure
let config = match device.default_input_config() {
Ok(c) => c,
Err(e) => {
tracing::error!("Failed to get default input config: {}", e);
return;
}
};
// Use the device's default config; multi-channel input is downmixed to mono in the callback.
let stream_config: cpal::StreamConfig = config.clone().into();
tracing::info!("Input config: {:?}", stream_config);
// Initialize RNNoise
// RNNoise expects chunks of 480 samples (10ms at 48kHz).
let mut denoise_state = DenoiseState::new();
let mut processing_buffer: Vec<f32> = Vec::with_capacity(480 * 2);
let mut out_buf = [0.0f32; DenoiseState::FRAME_SIZE];
let err_fn = |err| tracing::error!("Input stream error: {}", err);
let stream = match config.sample_format() {
cpal::SampleFormat::F32 => {
let running_clone = running.clone();
device.build_input_stream(
&stream_config,
move |data: &[f32], _: &_| {
if !running_clone.load(Ordering::Relaxed) { return; }
// Convert to Mono
let channels = stream_config.channels as usize;
let mono_samples: Vec<f32> = if channels == 1 {
data.to_vec()
} else {
data.chunks(channels).map(|chunk| chunk.iter().sum::<f32>() / channels as f32).collect()
};
if !mono_samples.is_empty() {
let use_denoise = denoise_enabled.load(Ordering::Relaxed);
if use_denoise {
processing_buffer.extend_from_slice(&mono_samples);
while processing_buffer.len() >= DenoiseState::FRAME_SIZE {
let chunk: Vec<f32> = processing_buffer.drain(0..DenoiseState::FRAME_SIZE).collect();
denoise_state.process_frame(&mut out_buf, &chunk);
let _ = mic_tx.send(out_buf.to_vec());
}
} else {
// Pass through
let _ = mic_tx.send(mono_samples);
}
}
},
err_fn,
None
)
},
_ => {
tracing::error!("Input device does not support F32 samples");
return;
}
};
match stream {
    Ok(s) => {
        if let Err(e) = s.play() {
            tracing::error!("Failed to play input stream: {}", e);
        }
        tracing::info!("Voice started (Capture)");
        // Keep the thread (and the stream) alive until shutdown
        while running.load(Ordering::Relaxed) {
            thread::sleep(Duration::from_millis(100));
        }
    }
    Err(e) => tracing::error!("Failed to build input stream: {}", e),
}
}
}
fn run_playback_loop(
running: Arc<AtomicBool>,
new_peer_rx: Receiver<(EndpointId, AudioConsumer)>,
device_name: Option<String>,
output_volume: Arc<AtomicU32>,
peer_levels: Arc<DashMap<EndpointId, f32>>,
) {
let host = cpal::default_host();
let device = if let Some(ref name) = device_name {
host.output_devices().ok().and_then(|mut ds| ds.find(|d| d.name().map(|n| n == *name).unwrap_or(false)))
} else {
host.default_output_device()
};
let device = match device {
Some(d) => d,
None => {
tracing::error!("No output device found");
return;
}
};
tracing::info!("Speaker opened: {:?}", device.name());
let config = match device.default_output_config() {
Ok(c) => c,
Err(e) => {
tracing::error!("Failed to get default output config: {}", e);
return;
}
};
let stream_config: cpal::StreamConfig = config.clone().into();
// Store consumers for mixing
let mut consumers: Vec<(EndpointId, AudioConsumer)> = Vec::new();
let err_fn = |err| tracing::error!("Output stream error: {}", err);
let stream = match config.sample_format() {
cpal::SampleFormat::F32 => {
let running_clone = running.clone();
device.build_output_stream(
&stream_config,
move |data: &mut [f32], _: &_| {
if !running_clone.load(Ordering::Relaxed) { return; }
let master_vol = f32::from_bits(output_volume.load(Ordering::Relaxed));
// Check for new peers non-blocking
while let Ok((id, c)) = new_peer_rx.try_recv() {
tracing::info!("Adding peer {} to mix", id);
consumers.push((id, c));
}
// Mix
let channels = stream_config.channels as usize;
// cpal hands us an interleaved buffer; our ring buffers are mono, so the
// mono mix is duplicated to every output channel.
// Accumulate squared samples per peer over this callback to derive an RMS
// level for UI visualization.
let mut peer_sums: HashMap<EndpointId, f32> = HashMap::new();
let mut peer_counts: HashMap<EndpointId, usize> = HashMap::new();
// Iterate output buffer frame by frame (all channels per sample time)
for frame in data.chunks_mut(channels) {
let mut sum: f32 = 0.0;
// Sum up all peers
for (id, c) in consumers.iter_mut() {
if let Some(sample) = c.try_pop() {
sum += sample;
// Accumulate squared sample for RMS
*peer_sums.entry(*id).or_default() += sample * sample;
*peer_counts.entry(*id).or_default() += 1;
}
}
// Apply master volume
sum *= master_vol;
// Hard clip to [-1, 1] to avoid wrap distortion
let mixed = sum.clamp(-1.0, 1.0);
// Assign to all channels
for sample in frame.iter_mut() {
*sample = mixed;
}
}
// Update peer levels in shared map
for (id, sq_sum) in peer_sums {
let count = peer_counts.get(&id).unwrap_or(&1);
let rms = (sq_sum / *count as f32).sqrt();
// Smooth decay could be implemented here, but for now just raw RMS
peer_levels.insert(id, rms);
}
},
err_fn,
None
)
},
_ => {
tracing::error!("Output device does not support F32 samples");
return;
}
};
match stream {
    Ok(s) => {
        if let Err(e) = s.play() {
            tracing::error!("Failed to play output stream: {}", e);
        }
        while running.load(Ordering::Relaxed) {
            thread::sleep(Duration::from_millis(100));
        }
    }
    Err(e) => tracing::error!("Failed to build output stream: {}", e),
}
}
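// The per-peer level shown in the UI is a plain RMS over the samples that
// peer contributed during one output callback; in isolation:
fn rms(samples: &[f32]) -> f32 {
    if samples.is_empty() {
        return 0.0;
    }
    let sq_sum: f32 = samples.iter().map(|s| s * s).sum();
    (sq_sum / samples.len() as f32).sqrt()
}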
async fn run_network_sender(
net: crate::net::NetworkManager,
peers: Vec<EndpointId>,
mut mic_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
running: Arc<AtomicBool>,
mic_bitrate: Arc<AtomicU32>,
) -> Result<()> {
if peers.is_empty() {
return Ok(());
}
) {
if peers.is_empty() { return; }
// Connect to all peers to get Connection handles
// Initialize connections
let mut connections = Vec::new();
for peer in peers {
// We use VOICE_ALPN, but for datagrams ALPN matters for connection establishment.
match network_manager
.endpoint
.connect(peer, crate::net::VOICE_ALPN)
.await
{
Ok(conn) => {
connections.push(conn);
}
Err(e) => {
tracing::warn!("Failed to connect to {}: {}", peer, e);
}
match net.endpoint.connect(peer, crate::net::VOICE_ALPN).await {
Ok(conn) => connections.push(conn),
Err(e) => tracing::warn!("Failed to connect to {}: {}", peer, e),
}
}
if connections.is_empty() {
tracing::warn!("No reachable peers for voice chat");
return Ok(());
return;
}
let mut encoder = OpusEncoder::new(SampleRate::Hz48000, Channels::Mono, Application::Voip)
.map_err(|e| anyhow::anyhow!("Failed to create Opus encoder: {:?}", e))?;
.expect("Failed to create Opus encoder");
// Initial bitrate
let _ = encoder.set_bitrate(Bitrate::BitsPerSecond(mic_bitrate.load(Ordering::Relaxed) as i32));
// Set initial bitrate
let current_bitrate = mic_bitrate.load(Ordering::Relaxed);
encoder
.set_bitrate(Bitrate::BitsPerSecond(current_bitrate as i32))
.map_err(|e| anyhow::anyhow!("Failed to set bitrate: {:?}", e))?;
let mut pcm_buffer: Vec<f32> = Vec::with_capacity(FRAME_SIZE_SAMPLES * 2);
let mut opus_buffer = vec![0u8; 1500];
// Opus frame size: 20ms at 48kHz = 960 samples
let frame_size = FRAME_SIZE_SAMPLES;
let mut pcm_buffer: Vec<f32> = Vec::with_capacity(frame_size * 2);
let mut opus_buffer = vec![0u8; 1500]; // MTU-ish
let mut sequence: u64 = 0;
tracing::info!("Starting voice sender loop for {} peers", connections.len());
tracing::info!("Voice Sender: Broadcasting to {} peers", connections.len());
while running.load(Ordering::Relaxed) {
// ... bitrate check ...
// Update bitrate
let bitrate = mic_bitrate.load(Ordering::Relaxed);
let _ = encoder.set_bitrate(Bitrate::BitsPerSecond(bitrate as i32));
// Receive PCM from Web
match input_rx.recv().await {
match mic_rx.recv().await {
Ok(samples) => {
// tracing::trace!("Received {} audio samples from web", samples.len());
pcm_buffer.extend_from_slice(&samples);
// Process 20ms chunks
while pcm_buffer.len() >= frame_size {
let chunk: Vec<f32> = pcm_buffer.drain(0..frame_size).collect();
while pcm_buffer.len() >= FRAME_SIZE_SAMPLES {
let chunk: Vec<f32> = pcm_buffer.drain(0..FRAME_SIZE_SAMPLES).collect();
match encoder.encode_float(&chunk, &mut opus_buffer) {
Ok(len) => {
let packet = opus_buffer[..len].to_vec();
let msg = MediaStreamMessage::AudioData {
sequence,
opus_data: packet,
};
sequence = sequence.wrapping_add(1);
// Serialize for Datagram
match postcard::to_allocvec(&msg) {
Ok(data) => {
let bytes = bytes::Bytes::from(data);
let mut sent_count = 0;
for (_i, conn) in connections.iter_mut().enumerate() {
if let Err(e) = conn.send_datagram(bytes.clone()) {
tracing::debug!("Failed to send datagram: {}", e);
} else {
sent_count += 1;
}
}
if sent_count > 0 && sequence % 50 == 0 {
tracing::info!(
"Sent audio datagram seq {} to {} peers",
sequence,
sent_count
);
}
let opus_packet = &opus_buffer[..len];
// Construct Datagram: [TYPE=1][OPUS]
let mut datagram = Vec::with_capacity(1 + len);
datagram.push(PACKET_TYPE_AUDIO);
datagram.extend_from_slice(opus_packet);
let bytes = Bytes::from(datagram);
// Send to all peers
for conn in &mut connections {
if let Err(e) = conn.send_datagram(bytes.clone()) {
// Don't log every failure for datagrams (spammy)
tracing::debug!("Datagram send error: {}", e);
}
Err(e) => tracing::error!("Serialization error: {}", e),
}
}
Err(e) => {
tracing::error!("Opus encode error: {:?}", e);
}
Err(e) => tracing::error!("Opus encode error: {:?}", e),
}
}
}
@@ -297,6 +552,4 @@ async fn run_opis_sender_web_multi(
}
}
}
Ok(())
}
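// Rough datagram sizing for the sender above (assumption: a common QUIC
// datagram budget of ~1200 bytes): a 20 ms Opus frame at 128 kb/s is about
// 320 bytes, so voice packets fit comfortably with the 1-byte type header.
const fn opus_frame_bytes(bitrate_bps: u32, frame_ms: u32) -> usize {
    (bitrate_bps as usize / 8) * frame_ms as usize / 1000
}
const _: () = assert!(1 + opus_frame_bytes(128_000, 20) <= 1200);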

View File

@@ -65,12 +65,14 @@ pub enum NetEvent {
}
/// Information about a connected peer.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, serde::Serialize)]
pub struct PeerInfo {
pub id: EndpointId,
pub name: Option<String>,
pub capabilities: Option<protocol::CapabilitiesMessage>,
pub is_self: bool,
#[serde(skip)]
pub audio_level: f32,
}
/// Manages the iroh networking stack.
@@ -204,6 +206,7 @@ impl NetworkManager {
name: None,
capabilities: None,
is_self: false,
audio_level: 0.0,
});
}
let _ = event_tx.send(NetEvent::PeerUp(peer_id)).await;

View File

@@ -15,6 +15,7 @@ use ratatui::Frame;
use crate::chat::ChatState;
use crate::file_transfer::FileTransferManager;
use crate::app_logic::AppCommand;
use crate::media::MediaState;
use crate::net::PeerInfo;
@@ -27,26 +28,6 @@ pub enum InputMode {
FilePrompt,
}
/// Commands produced by TUI event handling.
#[derive(Debug)]
pub enum TuiCommand {
SendMessage(String),
/// Local-only system message (not broadcast to peers).
SystemMessage(String),
SendFile(PathBuf),
AcceptFile(String), // file_id prefix
ChangeNick(String),
Connect(String),
ToggleVoice,
ToggleCamera,
ToggleScreen,
SetBitrate(u32),
Leave,
Quit,
None,
}
use crate::config::Theme;
/// Application state for the TUI.
@@ -76,7 +57,7 @@ impl App {
}
/// Handle a key event and return a command.
pub fn handle_key(&mut self, key: KeyEvent) -> TuiCommand {
pub fn handle_key(&mut self, key: KeyEvent) -> AppCommand {
match self.input_mode {
InputMode::FilePrompt => self.handle_file_prompt_key(key),
InputMode::Editing => self.handle_editing_key(key),
@@ -84,7 +65,7 @@ impl App {
}
}
fn handle_editing_key(&mut self, key: KeyEvent) -> TuiCommand {
fn handle_editing_key(&mut self, key: KeyEvent) -> AppCommand {
match key.code {
KeyCode::Enter => {
if !self.input.is_empty() {
@@ -98,65 +79,64 @@ impl App {
"nick" | "name" => {
let new_name = parts.get(1).unwrap_or(&"").trim();
if new_name.is_empty() {
return TuiCommand::SystemMessage(
return AppCommand::SystemMessage(
"Usage: /nick <new_name>".to_string(),
);
}
return TuiCommand::ChangeNick(new_name.to_string());
return AppCommand::ChangeNick(new_name.to_string());
}
"connect" | "join" => {
let peer_id = parts.get(1).unwrap_or(&"").trim();
if peer_id.is_empty() {
return TuiCommand::SystemMessage(
return AppCommand::SystemMessage(
"Usage: /connect <peer_id>".to_string(),
);
}
return TuiCommand::Connect(peer_id.to_string());
return AppCommand::Connect(peer_id.to_string());
}
"voice" => return TuiCommand::ToggleVoice,
"voice" => return AppCommand::ToggleVoice,
// mic/speaker commands removed
"camera" | "cam" => return TuiCommand::ToggleCamera,
"screen" | "share" => return TuiCommand::ToggleScreen,
"screen" | "share" => return AppCommand::ToggleScreen,
"file" | "send" => {
let path = parts.get(1).unwrap_or(&"").trim();
if path.is_empty() {
// Open native file dialog via rfd (cross-platform)
if let Some(file) = rfd::FileDialog::new().pick_file() {
return TuiCommand::SendFile(file);
return AppCommand::SendFile(file);
}
return TuiCommand::None; // cancelled
return AppCommand::None; // cancelled
}
return TuiCommand::SendFile(PathBuf::from(path));
return AppCommand::SendFile(PathBuf::from(path));
}
"accept" | "a" => {
let id_prefix = parts.get(1).unwrap_or(&"").trim();
if id_prefix.is_empty() {
return TuiCommand::SystemMessage(
return AppCommand::SystemMessage(
"Usage: /accept <file_id_prefix>".to_string(),
);
}
return TuiCommand::AcceptFile(id_prefix.to_string());
return AppCommand::AcceptFile(id_prefix.to_string());
}
"quit" | "q" => return TuiCommand::Quit,
"leave" => return TuiCommand::Leave,
"quit" | "q" => return AppCommand::Quit,
"leave" => return AppCommand::Leave,
"help" => {
return TuiCommand::SystemMessage(
"Commands: /nick <name>, /connect <id>, /voice, /camera, /screen, /file <path>, /accept <prefix>, /leave, /quit".to_string(),
return AppCommand::SystemMessage(
"Commands: /nick <name>, /connect <id>, /voice, /screen, /file <path>, /accept <prefix>, /leave, /quit".to_string(),
);
}
"bitrate" => {
let kbps_str = parts.get(1).unwrap_or(&"").trim();
if let Ok(kbps) = kbps_str.parse::<u32>() {
return TuiCommand::SetBitrate(kbps * 1000);
return AppCommand::SetBitrate(kbps * 1000);
} else {
return TuiCommand::SystemMessage(
return AppCommand::SystemMessage(
"Usage: /bitrate <kbps> (e.g. 128)".to_string(),
);
}
}
_ => {
return TuiCommand::SystemMessage(format!(
return AppCommand::SystemMessage(format!(
"Unknown command: /{}. Type /help",
parts[0]
));
@@ -164,114 +144,114 @@ impl App {
}
}
return TuiCommand::SendMessage(text);
return AppCommand::SendMessage(text);
}
TuiCommand::None
AppCommand::None
}
KeyCode::Char(c) => {
self.input.insert(self.cursor_position, c);
self.cursor_position += 1;
TuiCommand::None
AppCommand::None
}
KeyCode::Backspace => {
if self.cursor_position > 0 {
self.cursor_position -= 1;
self.input.remove(self.cursor_position);
}
TuiCommand::None
AppCommand::None
}
KeyCode::Delete => {
if self.cursor_position < self.input.len() {
self.input.remove(self.cursor_position);
}
TuiCommand::None
AppCommand::None
}
KeyCode::Left => {
if self.cursor_position > 0 {
self.cursor_position -= 1;
}
TuiCommand::None
AppCommand::None
}
KeyCode::Right => {
if self.cursor_position < self.input.len() {
self.cursor_position += 1;
}
TuiCommand::None
AppCommand::None
}
KeyCode::Home => {
self.cursor_position = 0;
TuiCommand::None
AppCommand::None
}
KeyCode::End => {
self.cursor_position = self.input.len();
TuiCommand::None
AppCommand::None
}
KeyCode::Esc => {
self.input_mode = InputMode::Normal;
TuiCommand::None
AppCommand::None
}
KeyCode::Up => {
self.scroll_offset = self.scroll_offset.saturating_add(1);
TuiCommand::None
AppCommand::None
}
KeyCode::Down => {
self.scroll_offset = self.scroll_offset.saturating_sub(1);
TuiCommand::None
AppCommand::None
}
_ => TuiCommand::None,
_ => AppCommand::None,
}
}
fn handle_normal_key(&mut self, key: KeyEvent) -> TuiCommand {
fn handle_normal_key(&mut self, key: KeyEvent) -> AppCommand {
match key.code {
KeyCode::Char('q') | KeyCode::Char('Q') => TuiCommand::Quit,
KeyCode::Char('q') | KeyCode::Char('Q') => AppCommand::Quit,
KeyCode::Char('/') => {
self.input_mode = InputMode::Editing;
self.input.push('/');
self.cursor_position = 1;
TuiCommand::None
AppCommand::None
}
KeyCode::Char('i') | KeyCode::Enter => {
self.input_mode = InputMode::Editing;
TuiCommand::None
AppCommand::None
}
KeyCode::Up => {
self.scroll_offset = self.scroll_offset.saturating_add(1);
TuiCommand::None
AppCommand::None
}
KeyCode::Down => {
self.scroll_offset = self.scroll_offset.saturating_sub(1);
TuiCommand::None
AppCommand::None
}
_ => TuiCommand::None,
_ => AppCommand::None,
}
}
fn handle_file_prompt_key(&mut self, key: KeyEvent) -> TuiCommand {
fn handle_file_prompt_key(&mut self, key: KeyEvent) -> AppCommand {
match key.code {
KeyCode::Enter => {
if !self.file_path_input.is_empty() {
let path = PathBuf::from(self.file_path_input.drain(..).collect::<String>());
self.input_mode = InputMode::Editing;
return TuiCommand::SendFile(path);
return AppCommand::SendFile(path);
}
self.input_mode = InputMode::Editing;
TuiCommand::None
AppCommand::None
}
KeyCode::Char(c) => {
self.file_path_input.push(c);
TuiCommand::None
AppCommand::None
}
KeyCode::Backspace => {
self.file_path_input.pop();
TuiCommand::None
AppCommand::None
}
KeyCode::Esc => {
self.file_path_input.clear();
self.input_mode = InputMode::Editing;
TuiCommand::None
AppCommand::None
}
_ => TuiCommand::None,
_ => AppCommand::None,
}
}
}

View File

@@ -45,11 +45,11 @@ pub fn render(
Span::raw(" "),
conn_status,
Span::styled(
format!(" {} ({})", our_name, our_id_short),
format!(" I {} ({})", our_name, our_id_short),
Style::default().fg(Color::Cyan),
),
Span::styled(
format!(" {}", media.status_line()),
format!(" I {}", media.status_line()),
Style::default().fg(Color::DarkGray),
),
]);

View File

@@ -21,7 +21,6 @@ use tokio::sync::broadcast;
struct AppState {
tx: broadcast::Sender<WebMediaEvent>,
mic_tx: broadcast::Sender<Vec<f32>>,
cam_tx: broadcast::Sender<Vec<u8>>,
screen_tx: broadcast::Sender<Vec<u8>>,
}
@@ -32,17 +31,16 @@ struct Assets;
pub async fn start_web_server(
tx: broadcast::Sender<WebMediaEvent>,
mic_tx: broadcast::Sender<Vec<f32>>,
cam_tx: broadcast::Sender<Vec<u8>>,
screen_tx: broadcast::Sender<Vec<u8>>,
) {
let state = AppState {
tx,
mic_tx,
cam_tx,
screen_tx,
};
let app = Router::new()
.route("/ws", get(ws_handler))
.route("/ws/audio", get(ws_audio_handler))
.route("/ws/screen", get(ws_screen_handler))
.fallback(static_handler)
.with_state(state);
@@ -66,103 +64,87 @@ pub async fn start_web_server(
axum::serve(listener, app).await.unwrap();
}
async fn ws_handler(ws: WebSocketUpgrade, State(state): State<AppState>) -> impl IntoResponse {
ws.on_upgrade(move |socket| handle_socket(socket, state))
// --- AUDIO ---
async fn ws_audio_handler(ws: WebSocketUpgrade, State(state): State<AppState>) -> impl IntoResponse {
ws.on_upgrade(move |socket| handle_audio_socket(socket, state))
}
async fn handle_socket(socket: WebSocket, state: AppState) {
async fn handle_audio_socket(socket: WebSocket, state: AppState) {
let (mut sender, mut receiver) = socket.split();
let mut rx = state.tx.subscribe();
// Outgoing (Server -> Browser)
tokio::spawn(async move {
while let Ok(event) = rx.recv().await {
let msg = match event {
WebMediaEvent::Audio {
peer_id,
data: samples,
} => {
// 1 byte header (0) + 1 byte ID len + ID bytes + f32 bytes
let id_bytes = peer_id.as_bytes();
let id_len = id_bytes.len() as u8;
let mut payload =
Vec::with_capacity(1 + 1 + id_bytes.len() + samples.len() * 4);
payload.push(0u8);
payload.push(id_len);
payload.extend_from_slice(id_bytes);
for s in samples {
payload.extend_from_slice(&s.to_ne_bytes());
}
Message::Binary(Bytes::from(payload))
if let WebMediaEvent::Audio { peer_id, data: samples } = event {
// Protocol: [IDLen] [ID] [f32...]
let id_bytes = peer_id.as_bytes();
let id_len = id_bytes.len() as u8;
let mut payload = Vec::with_capacity(1 + id_bytes.len() + samples.len() * 4);
payload.push(id_len);
payload.extend_from_slice(id_bytes);
for s in samples {
payload.extend_from_slice(&s.to_ne_bytes());
}
WebMediaEvent::Video {
peer_id,
kind,
data,
} => {
// 1 byte header (1=Camera, 2=Screen) + 1 byte ID len + ID bytes + WebP data
let header = match kind {
MediaKind::Camera => 1u8,
MediaKind::Screen => 2u8,
_ => 1u8,
};
let id_bytes = peer_id.as_bytes();
let id_len = id_bytes.len() as u8;
let mut payload = Vec::with_capacity(1 + 1 + id_bytes.len() + data.len());
payload.push(header);
payload.push(id_len);
payload.extend_from_slice(id_bytes);
payload.extend_from_slice(&data);
Message::Binary(Bytes::from(payload))
if sender.send(Message::Binary(Bytes::from(payload))).await.is_err() {
break;
}
};
if sender.send(msg).await.is_err() {
break;
}
}
});
// Incoming (Browser -> Server)
while let Some(msg) = receiver.next().await {
match msg {
Ok(Message::Binary(data)) => {
if data.is_empty() {
continue;
}
let header = data[0];
let payload = &data[1..];
match header {
3 => {
// Mic (f32 PCM)
// integrity check
if payload.len() % 4 == 0 {
let samples: Vec<f32> = payload
.chunks_exact(4)
.map(|b| f32::from_ne_bytes([b[0], b[1], b[2], b[3]]))
.collect();
// tracing::debug!("Received mic samples: {}", samples.len());
let _ = state.mic_tx.send(samples);
}
}
4 => {
// Camera (WebP)
tracing::debug!("Received camera frame: {} bytes", payload.len());
let _ = state.cam_tx.send(payload.to_vec());
}
5 => {
// Screen (WebP)
tracing::debug!("Received screen frame: {} bytes", payload.len());
let _ = state.screen_tx.send(payload.to_vec());
}
_ => {
tracing::warn!("Unknown WS header: {}", header);
}
}
if let Ok(Message::Binary(data)) = msg {
// Protocol: [f32...]
// (We dropped the header byte 3)
if data.len() % 4 == 0 {
let samples: Vec<f32> = data
.chunks_exact(4)
.map(|b| f32::from_ne_bytes([b[0], b[1], b[2], b[3]]))
.collect();
let _ = state.mic_tx.send(samples);
}
Ok(Message::Close(_)) => break,
Err(_) => break,
_ => {}
}
}
}
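// The browser-bound audio frame above, factored into a helper for clarity
// (assumption: the [id_len: u8][id bytes][f32 native-endian samples...]
// layout stays exactly as written in handle_audio_socket):
fn encode_audio_frame(peer_id: &str, samples: &[f32]) -> Vec<u8> {
    let id_bytes = peer_id.as_bytes();
    let mut payload = Vec::with_capacity(1 + id_bytes.len() + samples.len() * 4);
    payload.push(id_bytes.len() as u8);
    payload.extend_from_slice(id_bytes);
    for s in samples {
        payload.extend_from_slice(&s.to_ne_bytes());
    }
    payload
}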
// --- SCREEN ---
async fn ws_screen_handler(ws: WebSocketUpgrade, State(state): State<AppState>) -> impl IntoResponse {
ws.on_upgrade(move |socket| handle_screen_socket(socket, state))
}
async fn handle_screen_socket(socket: WebSocket, state: AppState) {
let (mut sender, mut receiver) = socket.split();
let mut rx = state.tx.subscribe();
// Outgoing (Server -> Browser)
tokio::spawn(async move {
while let Ok(event) = rx.recv().await {
if let WebMediaEvent::Video { peer_id, kind, data } = event {
if matches!(kind, MediaKind::Screen) {
let id_bytes = peer_id.as_bytes();
let id_len = id_bytes.len() as u8;
let mut payload = Vec::with_capacity(1 + id_bytes.len() + data.len());
payload.push(id_len);
payload.extend_from_slice(id_bytes);
payload.extend_from_slice(&data);
if sender.send(Message::Binary(Bytes::from(payload))).await.is_err() {
break;
}
}
}
}
});
// Incoming (Browser -> Server)
while let Some(msg) = receiver.next().await {
if let Ok(Message::Binary(data)) = msg {
// Protocol: [FrameType] [Data...]
tracing::debug!("Received screen frame: {} bytes", data.len());
let _ = state.screen_tx.send(data.to_vec());
}
}
}

View File

@@ -15,12 +15,18 @@ let micScriptProcessor = null;
let audioCtx = null;
const SAMPLE_RATE = 48000;
// Video Encoding State
let videoEncoder = null;
let screenEncoder = null;
let screenCanvasLoop = null; // requestAnimationFrame handle for the local preview loop
let frameCounter = 0;
// --- Remote Peer State ---
// Map<peerId, {
// id: string,
// nextStartTime: number,
// cam: { card: HTMLElement, img: HTMLElement, status: HTMLElement } | null,
// screen: { card: HTMLElement, img: HTMLElement, status: HTMLElement } | null,
// cam: { card: HTMLElement, canvas: HTMLCanvasElement, decoder: VideoDecoder, status: HTMLElement } | null,
// screen: { card: HTMLElement, canvas: HTMLCanvasElement, decoder: VideoDecoder, status: HTMLElement } | null,
// }>
const peers = new Map();
@@ -69,7 +75,7 @@ ws.onmessage = (event) => {
// Extract ID
const idBytes = new Uint8Array(data, 2, idLen);
const peerId = new TextDecoder().decode(idBytes);
let peerId = new TextDecoder().decode(idBytes);
// Extract Payload
const payload = data.slice(2 + idLen);
@@ -84,6 +90,7 @@ ws.onmessage = (event) => {
screen: null
};
peers.set(peerId, peer);
handlePeerConnected(peer); // Call new handler for peer connection
}
if (header === 0) { // Audio
@@ -103,11 +110,11 @@ function getOrCreateCard(peer, type) {
card.className = 'peer-card';
card.id = `peer-${peer.id}-${type}`;
// Video/Image element
const img = document.createElement('img');
img.className = 'peer-video';
img.alt = `${type} from ${peer.id}`;
card.appendChild(img);
// Video canvas element
const canvas = document.createElement('canvas');
canvas.className = 'peer-video';
// canvas.alt = `${type} from ${peer.id}`;
card.appendChild(canvas);
// Overlay info
const info = document.createElement('div');
@@ -123,9 +130,41 @@ function getOrCreateCard(peer, type) {
videoGrid.appendChild(card);
// Initialize VideoDecoder
const decoder = new VideoDecoder({
output: (frame) => {
// Draw frame to canvas
console.debug(`[Decoder] Frame decoded: ${frame.displayWidth}x${frame.displayHeight}`);
canvas.width = frame.displayWidth;
canvas.height = frame.displayHeight;
const ctx = canvas.getContext('2d');
ctx.drawImage(frame, 0, 0);
frame.close();
updatePeerActivity(cardObj, false);
},
error: (e) => {
console.error(`[Decoder] Error (${type}):`, e);
statusOverlay.style.display = 'flex';
let statusText = `Decoding H.264 from ${peer.id}...`;
statusOverlay.querySelector('h2').textContent = `${statusText} Video Decoder Error: ${e.message}`;
}
});
console.log(`[Decoder] Configuring H.264 decoder for ${peer.id} (${type})`);
try {
decoder.configure({
codec: 'avc1.42E01E', // H.264 Constrained Baseline
optimizeForLatency: true
});
} catch (err) {
console.error(`[Decoder] Configuration failed:`, err);
}
const cardObj = {
card: card,
imgElement: img,
canvas: canvas,
decoder: decoder,
statusElement: info.querySelector('.peer-status'),
activityTimeout: null
};
@@ -166,18 +205,27 @@ function handleRemoteAudio(peer, arrayBuffer) {
function handleRemoteVideo(peer, arrayBuffer, type) {
const cardObj = getOrCreateCard(peer, type);
const blob = new Blob([arrayBuffer], { type: 'image/webp' });
const url = URL.createObjectURL(blob);
const prevUrl = cardObj.imgElement.src;
cardObj.imgElement.onload = () => {
if (prevUrl && prevUrl.startsWith('blob:')) {
URL.revokeObjectURL(prevUrl);
// Payload format: [1 byte frame type] [N bytes encoded chunk]
// Frame Type: 0 = Key, 1 = Delta
const view = new DataView(arrayBuffer);
const isKey = view.getUint8(0) === 0;
const chunkData = arrayBuffer.slice(1);
const chunk = new EncodedVideoChunk({
type: isKey ? 'key' : 'delta',
timestamp: performance.now() * 1000, // Use local time for now, or derive from seq
data: chunkData
});
try {
if (cardObj.decoder.state === 'configured') {
cardObj.decoder.decode(chunk);
} else {
console.warn(`[Decoder] Not configured yet, dropping chunk (Key: ${isKey})`);
}
};
cardObj.imgElement.src = url;
updatePeerActivity(cardObj, false);
} catch (e) {
console.error("[Decoder] Decode exception:", e);
}
}
function updatePeerActivity(cardObj, isAudio) {
@@ -191,6 +239,22 @@ function updatePeerActivity(cardObj, isAudio) {
}
}
function handlePeerConnected(peer) {
// Peer connected (or local preview)
console.log(`[App] Peer connected: ${peer.id}`);
// Create card if not exists
let cardObj = getOrCreateCard(peer, 'cam'); // Assume cam card for general peer info
// If it's local preview, update the status
if (peer.id === 'local') {
const statusDot = cardObj.card.querySelector('.peer-status');
if (statusDot) statusDot.style.backgroundColor = '#3b82f6';
const nameLabel = cardObj.card.querySelector('.peer-name'); // Updated selector
if (nameLabel) nameLabel.textContent = "Local Preview (H.264)";
}
}
// --- Local Capture Controls ---
function updateButton(btn, active, iconOn, iconOff) {
@@ -294,12 +358,39 @@ function stopMic() {
async function startCam() {
try {
camStream = await navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480 } });
localVideo.srcObject = camStream;
startVideoSender(camStream, 4); // 4 = Camera
// Camera capture and encoding now live on the backend, and the web server
// exposes no endpoint to start them, so this button can only point the user
// at the TUI. Old flow: this button captured in the browser and streamed
// frames to the backend over the WebSocket. New flow: a TUI command spawns
// native (ffmpeg) capture and the web UI merely renders the broadcast it
// receives.
alert("Please use /cam in the terminal to start the camera (Backend Encoding).");
} catch (err) {
console.error('Error starting camera:', err);
alert('Camera access failed');
alert('Failed to start camera');
updateButton(toggleCamBtn, false, 'videocam', 'videocam_off');
}
}
@@ -308,62 +399,136 @@ function stopCam() {
camStream.getTracks().forEach(t => t.stop());
camStream = null;
}
if (videoEncoder) {
    // The encoder stays open; the read loop is tied to the (now stopped)
    // track, so no further frames are fed to it.
}
}
// Helper to read frames from the stream
async function readLoop(reader, encoder) {
while (true) {
const { done, value } = await reader.read();
if (done) break;
if (encoder.state === "configured") {
encoder.encode(value);
value.close();
} else {
value.close();
}
}
}
async function startScreen() {
try {
screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
localVideo.srcObject = screenStream;
// Hybrid Mode: Browser Capture + Backend Relay
screenStream = await navigator.mediaDevices.getDisplayMedia({
video: {
cursor: "always"
},
audio: false
});
const track = screenStream.getVideoTracks()[0];
const { width, height } = track.getSettings();
startVideoSender(screenStream, 5); // 5 = Screen
screenStream.getVideoTracks()[0].onended = () => {
stopScreen();
updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
};
// 1. Setup Local Preview (Draw to Canvas)
const localCardObj = getOrCreateCard({ id: 'local' }, 'screen');
const canvas = localCardObj.canvas;
const ctx = canvas.getContext('2d');
// Create a temp video element to play the stream for drawing
const tempVideo = document.createElement('video');
tempVideo.autoplay = true;
tempVideo.srcObject = screenStream;
tempVideo.muted = true;
await tempVideo.play();
// Canvas drawing loop
function drawLoop() {
if (tempVideo.paused || tempVideo.ended) return;
if (canvas.width !== tempVideo.videoWidth || canvas.height !== tempVideo.videoHeight) {
canvas.width = tempVideo.videoWidth;
canvas.height = tempVideo.videoHeight;
}
ctx.drawImage(tempVideo, 0, 0);
screenCanvasLoop = requestAnimationFrame(drawLoop);
}
drawLoop();
// 2. Encode and Send to Backend (for Peers)
// Config H.264 Encoder
screenEncoder = new VideoEncoder({
output: (chunk, metadata) => {
const buffer = new Uint8Array(chunk.byteLength);
chunk.copyTo(buffer);
// Construct Header: [5 (Screen)] [FrameType] [Data]
// EncodedVideoChunk.type is 'key' or 'delta'; map key -> 0, delta -> 1
const isKey = chunk.type === 'key';
const frameType = isKey ? 0 : 1;
const payload = new Uint8Array(1 + 1 + buffer.length);
payload[0] = 5; // Screen Header
payload[1] = frameType;
payload.set(buffer, 2);
if (ws && ws.readyState === WebSocket.OPEN) {
ws.send(payload);
}
},
error: (e) => console.error("Screen Encoder Error:", e)
});
screenEncoder.configure({
codec: 'avc1.42E01E', // H.264 Baseline
width: width,
height: height,
bitrate: 3_000_000, // 3Mbps
framerate: 30
});
// Reader
const processor = new MediaStreamTrackProcessor({ track });
const reader = processor.readable.getReader();
readLoop(reader, screenEncoder);
updateButton(toggleScreenBtn, true, 'stop_screen_share', 'stop_screen_share');
// Clean up on stop
track.onended = () => stopScreen();
} catch (err) {
console.error('Error starting screen:', err);
alert(`Failed to start screen share: ${err.message}. \n(Make sure to run /screen in terminal first!)`);
updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
}
}
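// The encoder-output framing above as a standalone helper (assumption:
// header byte 5 = screen stream, frame type 0 = key, 1 = delta, matching
// the Rust side's parser):
function packScreenChunk(chunk) {
    const buffer = new Uint8Array(chunk.byteLength);
    chunk.copyTo(buffer);
    const payload = new Uint8Array(2 + buffer.length);
    payload[0] = 5;                            // screen stream header
    payload[1] = chunk.type === 'key' ? 0 : 1; // frame type
    payload.set(buffer, 2);
    return payload;
}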
function stopScreen() {
async function stopScreen() {
if (screenStream) {
screenStream.getTracks().forEach(t => t.stop());
screenStream = null;
}
if (screenEncoder) {
screenEncoder.close();
screenEncoder = null;
}
if (screenCanvasLoop) {
cancelAnimationFrame(screenCanvasLoop);
screenCanvasLoop = null;
}
const localCardObj = getOrCreateCard({ id: 'local' }, 'screen');
const ctx = localCardObj.canvas.getContext('2d');
ctx.clearRect(0, 0, localCardObj.canvas.width, localCardObj.canvas.height);
updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
}
function startVideoSender(stream, headerByte) {
const video = document.createElement('video');
video.srcObject = stream;
video.play();
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
const sendFrame = () => {
if (!stream.active) return;
if (video.readyState === video.HAVE_ENOUGH_DATA) {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
ctx.drawImage(video, 0, 0);
canvas.toBlob((blob) => {
if (!blob) return;
const reader = new FileReader();
reader.onloadend = () => {
if (ws.readyState === WebSocket.OPEN) {
const arrayBuffer = reader.result;
const buffer = new ArrayBuffer(1 + arrayBuffer.byteLength);
const view = new Uint8Array(buffer);
view[0] = headerByte;
view.set(new Uint8Array(arrayBuffer), 1);
ws.send(buffer);
}
};
reader.readAsArrayBuffer(blob);
}, 'image/webp', 0.6);
}
setTimeout(sendFrame, 100); // 10 FPS
};
sendFrame();
}

View File

@@ -83,7 +83,7 @@ body {
align-items: center;
}
.peer-card img, .peer-card video {
.peer-card img, .peer-card video, .peer-card canvas {
width: 100%;
height: 100%;
object-fit: contain; /* or cover if preferred */