windows!(

This commit is contained in:
mixa
2026-02-12 21:06:34 +03:00
parent 61e955a695
commit a3cb75489a
17 changed files with 625 additions and 700 deletions

726
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -39,10 +39,10 @@ toml = "0.7"
directories = "5.0"
# Media
pipewire = "0.9"
libspa = "0.9"
songbird = { version = "0.4", features = ["builtin-queue"] }
audiopus = "0.2"
rfd = "0.14"
crossbeam-channel = "0.5"
axum = { version = "0.8.8", features = ["ws"] }
tokio-stream = "0.1.18"

51
README.md Normal file
View File

@@ -0,0 +1,51 @@
# P2P Chat & File Transfer
A secure, serverless peer-to-peer chat application built with Rust and iroh.
## Features
- **Decentralized**: No central server; peers connect directly via QUIC and NAT traversal.
- **Commands**:
- `/connect <peer_id>`: Connect to a peer.
- `/nick <name>`: Set your display name.
- `/file <path>`: Send a file.
- `/accept <file_id_prefix>`: Accept a file transfer.
- `/mic`: Select microphone input.
- `/speaker`: Select speaker output.
- `/bitrate <kbps>`: Set audio bitrate.
- **Cross-Platform**: Works on Linux, Windows, and macOS.
- File sharing supported on all platforms.
- Voice/Screen share (Linux only for now, requires PipeWire).
## Installation
1. Install Rust: https://rustup.rs/
2. Clone the repo:
```bash
git clone https://github.com/yourusername/p2p-chat.git
cd p2p-chat
```
3. Build & Run:
```bash
cargo run --release
```
## Usage
1. **Start the app**. You will see your **Peer ID** in the top bar.
2. **Share your Peer ID** with a friend.
3. **Connect**: Type `/connect <friend_peer_id>`.
4. **Chat**: Type messages and press Enter.
5. **Send Files**:
- Type `/file` to open a file picker.
- Or `/file /path/to/file` to send immediately.
- The recipient must type `/accept <file_id>` (or, if the TUI has mouse support enabled, click the accept button).
## Configuration
Configuration is stored in:
- Linux: `~/.config/p2p-chat/config.toml`
- Windows: `%APPDATA%\p2p-chat\config.toml`
- macOS: `~/Library/Application Support/p2p-chat/config.toml`
You can customize the UI colors, the default screen-share resolution, the network interface, and other settings.

View File

@@ -157,25 +157,7 @@ impl AppLogic {
"Session ended. Use /connect <peer_id> to start a new session.".to_string(),
);
}
TuiCommand::SelectMic(node_name) => {
self.media.set_mic_name(Some(node_name.clone()));
if self.media.voice_enabled() {
self.chat
.add_system_message("Restarting voice with new mic...".to_string());
// Toggle off
let _ = self.media.toggle_voice(self.net.clone()).await;
// Toggle on
let status = self.media.toggle_voice(self.net.clone()).await;
self.chat.add_system_message(status.to_string());
}
self.chat
.add_system_message(format!("🎤 Mic set to: {}", node_name));
// Save to config
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.mic_name = Some(node_name);
let _ = cfg.save();
}
}
TuiCommand::SetBitrate(bps) => {
self.media.set_bitrate(bps);
self.chat
@@ -186,22 +168,7 @@ impl AppLogic {
let _ = cfg.save();
}
}
TuiCommand::SelectSpeaker(node_name) => {
self.media.set_speaker_name(Some(node_name.clone()));
self.chat
.add_system_message(format!("🔊 Speaker set to: {}", node_name));
// Save to config
if let Ok(mut cfg) = AppConfig::load() {
cfg.media.speaker_name = Some(node_name);
if let Err(e) = cfg.save() {
tracing::warn!("Failed to save config: {}", e);
}
}
if self.media.voice_enabled() {
self.chat
.add_system_message("Restart voice chat to apply changes.".to_string());
}
}
TuiCommand::AcceptFile(prefix) => {
// Find matching transfer
let transfers = self.file_mgr.transfers.lock().unwrap();
@@ -230,13 +197,15 @@ impl AppLogic {
};
// Update state to Requesting so we auto-accept when stream comes
let expires_at = std::time::Instant::now() + std::time::Duration::from_secs(60);
let expires_at =
std::time::Instant::now() + std::time::Duration::from_secs(60);
{
// We need to upgrade the lock or re-acquire?
// We dropped transfers lock at line 221.
let mut transfers = self.file_mgr.transfers.lock().unwrap();
if let Some(t) = transfers.get_mut(&info.file_id) {
t.state = crate::file_transfer::TransferState::Requesting { expires_at };
t.state =
crate::file_transfer::TransferState::Requesting { expires_at };
}
}

View File

@@ -59,9 +59,6 @@ impl Default for NetworkConfig {
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MediaConfig {
pub screen_resolution: String,
pub mic_name: Option<String>,
pub speaker_name: Option<String>,
#[serde(default = "default_bitrate")]
pub mic_bitrate: u32,
}
@@ -73,9 +70,6 @@ fn default_bitrate() -> u32 {
impl Default for MediaConfig {
fn default() -> Self {
Self {
screen_resolution: "1280x720".to_string(),
mic_name: None,
speaker_name: None,
mic_bitrate: 128000,
}
}
@@ -83,18 +77,14 @@ impl Default for MediaConfig {
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UiConfig {
pub border: String,
pub text: String,
#[serde(default = "default_cyan")]
pub chat_border: String,
#[serde(default = "default_cyan")]
pub peer_border: String,
#[serde(default = "default_cyan")]
pub input_border: String,
#[serde(default = "default_yellow")]
pub transfer_border: String,
pub self_name: String,
pub peer_name: String,
pub system_msg: String,
pub time: String,
@@ -104,27 +94,21 @@ pub struct UiConfig {
pub error: String,
#[serde(default)]
pub warning: String,
#[serde(default)]
pub info: String,
}
impl Default for UiConfig {
fn default() -> Self {
Self {
border: "cyan".to_string(),
text: "white".to_string(),
self_name: "green".to_string(),
peer_name: "magenta".to_string(),
system_msg: "yellow".to_string(),
time: "dark_gray".to_string(),
chat_border: "cyan".to_string(),
peer_border: "cyan".to_string(),
input_border: "cyan".to_string(),
transfer_border: "yellow".to_string(),
success: "green".to_string(),
error: "red".to_string(),
warning: "yellow".to_string(),
info: "cyan".to_string(),
}
}
}
@@ -218,39 +202,31 @@ pub fn parse_color(color_str: &str) -> Color {
// Runtime Theme struct derived from config
#[derive(Debug, Clone)]
pub struct Theme {
pub border: Color,
pub chat_border: Color,
pub peer_border: Color,
pub input_border: Color,
pub transfer_border: Color,
pub text: Color,
pub self_name: Color,
pub peer_name: Color,
pub system_msg: Color,
pub time: Color,
pub success: Color,
pub error: Color,
pub warning: Color,
pub info: Color,
}
impl From<UiConfig> for Theme {
fn from(cfg: UiConfig) -> Self {
Self {
border: parse_color(&cfg.border),
chat_border: parse_color(&cfg.chat_border),
peer_border: parse_color(&cfg.peer_border),
input_border: parse_color(&cfg.input_border),
transfer_border: parse_color(&cfg.transfer_border),
text: parse_color(&cfg.text),
self_name: parse_color(&cfg.self_name),
peer_name: parse_color(&cfg.peer_name),
system_msg: parse_color(&cfg.system_msg),
time: parse_color(&cfg.time),
success: parse_color(&cfg.success),
error: parse_color(&cfg.error),
warning: parse_color(&cfg.warning),
info: parse_color(&cfg.info),
}
}
}

View File

@@ -38,7 +38,10 @@ pub enum TransferState {
/// Transfer was rejected by the peer.
Rejected { completed_at: std::time::Instant },
/// Transfer failed with an error.
Failed { error: String, completed_at: std::time::Instant },
Failed {
error: String,
completed_at: std::time::Instant,
},
}
/// Information about a tracked file transfer.
@@ -128,7 +131,8 @@ impl FileTransferManager {
file_name,
file_size,
state: TransferState::WaitingForAccept {
expires_at: std::time::Instant::now() + std::time::Duration::from_secs(timeout),
expires_at: std::time::Instant::now()
+ std::time::Duration::from_secs(timeout),
},
is_outgoing: true,
peer: None,
@@ -146,9 +150,13 @@ impl FileTransferManager {
let now = std::time::Instant::now();
for info in transfers.values_mut() {
match info.state {
TransferState::WaitingForAccept { expires_at } | TransferState::Requesting { expires_at } => {
TransferState::WaitingForAccept { expires_at }
| TransferState::Requesting { expires_at } => {
if now > expires_at {
info.state = TransferState::Failed { error: "Timed out".to_string(), completed_at: now };
info.state = TransferState::Failed {
error: "Timed out".to_string(),
completed_at: now,
};
}
}
_ => {}
@@ -156,20 +164,21 @@ impl FileTransferManager {
}
// Remove expired
let to_remove: Vec<FileId> = transfers.iter().filter_map(|(id, info)| {
match info.state {
TransferState::Complete { completed_at } |
TransferState::Rejected { completed_at } |
TransferState::Failed { completed_at, .. } => {
let to_remove: Vec<FileId> = transfers
.iter()
.filter_map(|(id, info)| match info.state {
TransferState::Complete { completed_at }
| TransferState::Rejected { completed_at }
| TransferState::Failed { completed_at, .. } => {
if now.duration_since(completed_at) > std::time::Duration::from_secs(10) {
Some(*id)
} else {
None
}
}
_ => None
}
}).collect();
_ => None,
})
.collect();
for id in to_remove {
transfers.remove(&id);
@@ -188,26 +197,9 @@ impl FileTransferManager {
// `execute_send` logic needs a timeout on `decode_framed`.
}
/// Resolve a pending accept/reject decision for `file_id`.
///
/// Returns `true` if a transfer task was waiting on the decision and has
/// been notified; `false` if no pending decision existed for that id.
fn resolve_pending_accept(&self, file_id: FileId, accept: bool) -> bool {
    let mut pending = self.pending_accepts.lock().unwrap();
    match pending.remove(&file_id) {
        Some(tx) => {
            // The waiting task may already have timed out and dropped its
            // receiver; a failed send is harmless, so the result is ignored.
            let _ = tx.send(accept);
            true
        }
        None => false,
    }
}

/// Accept a pending incoming file transfer.
///
/// Returns `true` if a transfer task was waiting on this decision.
pub fn accept_transfer(&self, file_id: FileId) -> bool {
    self.resolve_pending_accept(file_id, true)
}

/// Reject a pending incoming file transfer.
///
/// Returns `true` if a transfer task was waiting on this decision.
#[allow(dead_code)]
pub fn reject_transfer(&self, file_id: FileId) -> bool {
    self.resolve_pending_accept(file_id, false)
}
/// Execute the sending side of a file transfer over a QUIC bi-stream.
#[allow(dead_code)]
@@ -231,7 +223,9 @@ impl FileTransferManager {
FileStreamMessage::Reject(_) => {
let mut transfers = self.transfers.lock().unwrap();
if let Some(info) = transfers.get_mut(&file_id) {
info.state = TransferState::Rejected { completed_at: std::time::Instant::now() };
info.state = TransferState::Rejected {
completed_at: std::time::Instant::now(),
};
}
return Ok(());
}
@@ -281,7 +275,9 @@ impl FileTransferManager {
{
let mut transfers = self.transfers.lock().unwrap();
if let Some(info) = transfers.get_mut(&file_id) {
info.state = TransferState::Complete { completed_at: std::time::Instant::now() };
info.state = TransferState::Complete {
completed_at: std::time::Instant::now(),
};
}
}
@@ -414,7 +410,9 @@ impl FileTransferManager {
// Check if it wasn't already updated (e.g. by manual reject)
if let Some(info) = transfers.get_mut(&file_id) {
// Could be Offering or WaitingForAccept depending on race
info.state = TransferState::Rejected { completed_at: std::time::Instant::now() };
info.state = TransferState::Rejected {
completed_at: std::time::Instant::now(),
};
}
}
@@ -496,7 +494,9 @@ impl FileTransferManager {
{
let mut transfers = self.transfers.lock().unwrap();
if let Some(info) = transfers.get_mut(&file_id) {
info.state = TransferState::Complete { completed_at: std::time::Instant::now() };
info.state = TransferState::Complete {
completed_at: std::time::Instant::now(),
};
}
}
@@ -528,7 +528,10 @@ impl FileTransferManager {
0
};
if info.is_outgoing {
format!("{} {} (Wait Accept - {}s)", direction, info.file_name, remaining)
format!(
"{} {} (Wait Accept - {}s)",
direction, info.file_name, remaining
)
} else {
format!(
"{} {} (Incoming Offer - {}s)",

View File

@@ -57,9 +57,6 @@ struct Cli {
/// Download directory for received files
#[arg(short, long, default_value = "~/Downloads")]
download_dir: String,
/// Screen resolution for sharing (e.g., 1280x720, 1920x1080)
#[arg(long)]
screen_resolution: Option<String>, // Changed to Option to fallback to config
}
#[tokio::main]
@@ -94,12 +91,7 @@ async fn main() -> Result<()> {
let cli = Cli::parse();
// Resolution: CLI > Config > Default
let res_str = cli
.screen_resolution
.as_deref()
.unwrap_or(&config.media.screen_resolution);
let screen_res = parse_resolution(res_str).unwrap_or((1280, 720));
// Topic: CLI > Config > Default
let topic_str = cli
@@ -136,12 +128,8 @@ async fn main() -> Result<()> {
let file_mgr = FileTransferManager::new(download_path);
// Pass mic name from config if present
let media = MediaState::new(
screen_res,
config.media.mic_name.clone(),
config.media.speaker_name.clone(),
config.media.mic_bitrate,
);
// Pass mic name from config if present
let media = MediaState::new(config.media.mic_bitrate);
// Initialize App with Theme
let theme = crate::config::Theme::from(config.ui.clone());
@@ -376,13 +364,4 @@ fn parse_topic(hex_str: &str) -> Result<[u8; 32]> {
}
}
/// Parse a `WIDTHxHEIGHT` resolution string (e.g. `"1280x720"`).
///
/// Returns `None` unless the string is exactly two `u32` values separated
/// by a single `x` (extra `x` separators make the second parse fail, so
/// `"1x2x3"` is rejected just like the original collect-and-count check).
fn parse_resolution(res: &str) -> Option<(u32, u32)> {
    let (w, h) = res.split_once('x')?;
    Some((w.parse().ok()?, h.parse().ok()?))
}

View File

@@ -134,7 +134,7 @@ async fn run_video_sender_web(
while running.load(Ordering::Relaxed) {
match input_rx.recv().await {
Ok(data) => {
// Web sends MJPEG chunk (full frame)
// Web sends WebP chunk (full frame)
let msg = MediaStreamMessage::VideoFrame {
sequence: 0, // Sequence not used for web input, set to 0
timestamp_ms: std::time::SystemTime::now()

View File

@@ -45,15 +45,6 @@ pub struct MediaState {
/// Playback task handles for incoming streams (voice/video).
/// Using a list to allow multiple streams (audio+video) from same or different peers.
incoming_media: Vec<tokio::task::JoinHandle<()>>,
/// Whether PipeWire is available on this system.
pipewire_available: bool,
/// Configured screen resolution (width, height).
#[allow(dead_code)]
screen_resolution: (u32, u32),
/// Configured microphone name (target).
mic_name: Option<String>,
/// Configured speaker name (target).
speaker_name: Option<String>,
/// Broadcast channel for web playback.
pub broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
// Input channels (from Web -> MediaState -> Peers)
@@ -64,13 +55,7 @@ pub struct MediaState {
}
impl MediaState {
pub fn new(
screen_resolution: (u32, u32),
mic_name: Option<String>,
speaker_name: Option<String>,
mic_bitrate: u32,
) -> Self {
let pipewire_available = check_pipewire();
pub fn new(mic_bitrate: u32) -> Self {
let (broadcast_tx, _) = tokio::sync::broadcast::channel(100);
let (mic_broadcast, _) = tokio::sync::broadcast::channel(100);
let (cam_broadcast, _) = tokio::sync::broadcast::channel(100);
@@ -80,10 +65,6 @@ impl MediaState {
camera: None,
screen: None,
incoming_media: Vec::new(),
pipewire_available,
screen_resolution,
mic_name,
speaker_name,
broadcast_tx,
mic_broadcast,
cam_broadcast,
@@ -96,16 +77,6 @@ impl MediaState {
self.mic_bitrate.store(bitrate, Ordering::Relaxed);
}
/// Update the selected microphone name.
pub fn set_mic_name(&mut self, name: Option<String>) {
self.mic_name = name;
}
/// Update the selected speaker name.
pub fn set_speaker_name(&mut self, name: Option<String>) {
self.speaker_name = name;
}
// -----------------------------------------------------------------------
// Public state queries
// -----------------------------------------------------------------------
@@ -122,20 +93,12 @@ impl MediaState {
self.screen.is_some()
}
#[allow(dead_code)]
pub fn pipewire_available(&self) -> bool {
self.pipewire_available
}
// -----------------------------------------------------------------------
// Toggle methods — return a status message for the TUI
// -----------------------------------------------------------------------
/// Toggle voice chat. Opens media QUIC streams to all current peers.
pub async fn toggle_voice(&mut self, net: NetworkManager) -> &'static str {
if !self.pipewire_available {
return "Voice chat unavailable (PipeWire not found)";
}
if self.voice.is_some() {
// Stop
if let Some(mut v) = self.voice.take() {
@@ -361,12 +324,3 @@ impl Drop for MediaState {
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Check if PipeWire is available on this system.
///
/// Probes availability by attempting to initialize the PipeWire library
/// and treating a panic as "not available".
fn check_pipewire() -> bool {
    // Try to initialize PipeWire — if it fails, it's not available
    // NOTE(review): catch_unwind only intercepts unwinding panics; if the
    // binary is built with panic=abort this probe cannot recover — confirm.
    std::panic::catch_unwind(|| {
        pipewire::init();
    })
    .is_ok()
}

View File

@@ -25,86 +25,6 @@ const SAMPLE_RATE_VAL: i32 = 48000;
const FRAME_SIZE_MS: u32 = 20; // 20ms
const FRAME_SIZE_SAMPLES: usize = (SAMPLE_RATE_VAL as usize * FRAME_SIZE_MS as usize) / 1000;
/// Represents an available audio device (source or sink).
///
/// Instances are discovered from `pw-dump` output by `list_audio_nodes`.
#[derive(Debug, Clone)]
pub struct AudioDevice {
    /// PipeWire node name (used as target.object)
    pub node_name: String,
    /// Human-readable description (from `node.description` or `node.nick`)
    pub description: String,
}
/// List available audio input sources via `pw-dump`.
///
/// Returns an empty list if `pw-dump` is unavailable or its output cannot
/// be parsed (see `list_audio_nodes`).
pub fn list_audio_sources() -> Vec<AudioDevice> {
    list_audio_nodes("Audio/Source")
}
/// List available audio output sinks via `pw-dump`.
///
/// Returns an empty list if `pw-dump` is unavailable or its output cannot
/// be parsed (see `list_audio_nodes`).
pub fn list_audio_sinks() -> Vec<AudioDevice> {
    list_audio_nodes("Audio/Sink")
}
/// Query PipeWire (via the `pw-dump` CLI) and collect audio nodes whose
/// `media.class` contains `filter_class`. Duplex nodes (`"Audio/Duplex"`)
/// are always included since they act as both source and sink.
///
/// Any failure — missing `pw-dump`, non-zero exit status, invalid UTF-8,
/// or unparseable JSON — yields an empty list rather than an error.
fn list_audio_nodes(filter_class: &str) -> Vec<AudioDevice> {
    use std::process::Command;

    let output = match Command::new("pw-dump").output() {
        Ok(o) => o,
        Err(_) => return Vec::new(),
    };
    if !output.status.success() {
        return Vec::new();
    }
    let json_text = match String::from_utf8(output.stdout) {
        Ok(s) => s,
        Err(_) => return Vec::new(),
    };
    let objects: Vec<serde_json::Value> = match serde_json::from_str(&json_text) {
        Ok(d) => d,
        Err(_) => return Vec::new(),
    };

    objects
        .iter()
        .filter_map(|obj| {
            let props = obj.get("info")?.get("props")?;
            let media_class = props
                .get("media.class")
                .and_then(|v| v.as_str())
                .unwrap_or("");
            // Match partial class, e.g. "Audio/Source", "Audio/Sink", "Audio/Duplex".
            if !media_class.contains(filter_class) && !media_class.contains("Audio/Duplex") {
                return None;
            }
            let node_name = props
                .get("node.name")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            // Nodes without a usable name cannot be targeted; skip them.
            if node_name.is_empty() {
                return None;
            }
            let description = props
                .get("node.description")
                .or_else(|| props.get("node.nick"))
                .and_then(|v| v.as_str())
                .unwrap_or(&node_name)
                .to_string();
            Some(AudioDevice {
                node_name,
                description,
            })
        })
        .collect()
}
/// Main voice chat coordination.
pub struct VoiceChat {
running: Arc<AtomicBool>,

View File

@@ -103,8 +103,7 @@ pub fn render(frame: &mut Frame, area: Rect, file_mgr: &FileTransferManager, app
let remaining = expires_at.duration_since(now).as_secs();
// spinner (braille)
const SPINNER: &[&str] =
&["","", "", "", "", ""];
const SPINNER: &[&str] = &["", "", "", "", "", ""];
let millis = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
@@ -131,7 +130,7 @@ pub fn render(frame: &mut Frame, area: Rect, file_mgr: &FileTransferManager, app
let now = std::time::Instant::now();
if *expires_at > now {
let remaining = expires_at.duration_since(now).as_secs();
(
(
format!(
"[{}] {} (Requesting... {}s)",
id_short, info.file_name, remaining

View File

@@ -24,8 +24,6 @@ pub fn render(frame: &mut Frame, area: Rect, app: &App) {
app.theme.time,
app.input.as_str(),
),
InputMode::MicSelect => (" 🎤 Selecting microphone... ", app.theme.input_border, ""),
InputMode::SpeakerSelect => (" 🔊 Selecting speaker... ", app.theme.input_border, ""),
};
let block = Block::default()
@@ -48,7 +46,5 @@ pub fn render(frame: &mut Frame, area: Rect, app: &App) {
frame.set_cursor_position((area.x + 1 + app.file_path_input.len() as u16, area.y + 1));
}
InputMode::Normal => {}
InputMode::MicSelect => {}
InputMode::SpeakerSelect => {}
}
}

View File

@@ -9,15 +9,12 @@ pub mod status_bar;
use std::path::PathBuf;
use crossterm::event::{KeyCode, KeyEvent};
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Clear, List, ListItem};
use ratatui::layout::{Constraint, Direction, Layout};
use ratatui::Frame;
use crate::chat::ChatState;
use crate::file_transfer::FileTransferManager;
use crate::media::voice::AudioDevice;
use crate::media::MediaState;
use crate::net::PeerInfo;
@@ -28,8 +25,6 @@ pub enum InputMode {
Editing,
#[allow(dead_code)]
FilePrompt,
MicSelect,
SpeakerSelect,
}
/// Commands produced by TUI event handling.
@@ -45,8 +40,7 @@ pub enum TuiCommand {
ToggleVoice,
ToggleCamera,
ToggleScreen,
SelectMic(String), // node_name of selected mic
SelectSpeaker(String), // node_name of selected speaker
SetBitrate(u32),
Leave,
Quit,
@@ -54,7 +48,6 @@ pub enum TuiCommand {
}
use crate::config::Theme;
// ... imports ...
/// Application state for the TUI.
pub struct App {
@@ -66,9 +59,6 @@ pub struct App {
pub show_file_panel: bool,
pub file_path_input: String,
pub theme: Theme,
// Device selection state (reused for Mic and Speaker)
pub audio_devices: Vec<AudioDevice>,
pub device_selected_index: usize,
}
impl App {
@@ -80,31 +70,14 @@ impl App {
scroll_offset: 0,
show_file_panel: true,
file_path_input: String::new(),
theme,
audio_devices: Vec::new(),
device_selected_index: 0,
}
}
/// Open the mic selection screen.
pub fn open_mic_select(&mut self, sources: Vec<AudioDevice>) {
self.audio_devices = sources;
self.device_selected_index = 0;
self.input_mode = InputMode::MicSelect;
}
/// Open the speaker selection screen.
pub fn open_speaker_select(&mut self, sinks: Vec<AudioDevice>) {
self.audio_devices = sinks;
self.device_selected_index = 0;
self.input_mode = InputMode::SpeakerSelect;
}
/// Handle a key event and return a command.
pub fn handle_key(&mut self, key: KeyEvent) -> TuiCommand {
match self.input_mode {
InputMode::MicSelect => self.handle_device_select_key(key),
InputMode::SpeakerSelect => self.handle_device_select_key(key),
InputMode::FilePrompt => self.handle_file_prompt_key(key),
InputMode::Editing => self.handle_editing_key(key),
InputMode::Normal => self.handle_normal_key(key),
@@ -141,62 +114,18 @@ impl App {
return TuiCommand::Connect(peer_id.to_string());
}
"voice" => return TuiCommand::ToggleVoice,
"mic" | "microphone" => {
// Open mic selection screen
let sources = crate::media::voice::list_audio_sources();
if sources.is_empty() {
return TuiCommand::SystemMessage(
"🎤 No audio sources found (is PipeWire running?)"
.to_string(),
);
}
self.open_mic_select(sources);
return TuiCommand::None;
}
"speaker" | "output" => {
// Open speaker selection screen
let sinks = crate::media::voice::list_audio_sinks();
if sinks.is_empty() {
return TuiCommand::SystemMessage(
"🔊 No audio outputs found (is PipeWire running?)"
.to_string(),
);
}
self.open_speaker_select(sinks);
return TuiCommand::None;
}
// mic/speaker commands removed
"camera" | "cam" => return TuiCommand::ToggleCamera,
"screen" | "share" => return TuiCommand::ToggleScreen,
"file" | "send" => {
let path = parts.get(1).unwrap_or(&"").trim();
if path.is_empty() {
// Open native file dialog
use std::process::Command;
let result = Command::new("zenity")
.args(["--file-selection", "--title=Select file to send"])
.output()
.or_else(|_| {
Command::new("kdialog")
.args(["--getopenfilename", "."])
.output()
});
match result {
Ok(output) if output.status.success() => {
let chosen = String::from_utf8_lossy(&output.stdout)
.trim()
.to_string();
if !chosen.is_empty() {
return TuiCommand::SendFile(PathBuf::from(chosen));
}
return TuiCommand::None; // cancelled
}
_ => {
return TuiCommand::SystemMessage(
"No file dialog available. Use: /file <path>"
.to_string(),
);
}
// Open native file dialog via rfd (cross-platform)
if let Some(file) = rfd::FileDialog::new().pick_file() {
return TuiCommand::SendFile(file);
}
return TuiCommand::None; // cancelled
}
return TuiCommand::SendFile(PathBuf::from(path));
}
@@ -213,7 +142,7 @@ impl App {
"leave" => return TuiCommand::Leave,
"help" => {
return TuiCommand::SystemMessage(
"Commands: /nick <name>, /connect <id>, /voice, /mic, /camera, /screen, /file <path>, /accept <prefix>, /leave, /quit".to_string(),
"Commands: /nick <name>, /connect <id>, /voice, /camera, /screen, /file <path>, /accept <prefix>, /leave, /quit".to_string(),
);
}
"bitrate" => {
@@ -345,45 +274,6 @@ impl App {
_ => TuiCommand::None,
}
}
/// Handle keys while the mic/speaker selection popup is open.
///
/// Up/Down (or vim-style k/j) move the highlight, Enter confirms the
/// highlighted device, Esc/q cancels. Confirming or cancelling returns
/// the input mode to `Editing`.
fn handle_device_select_key(&mut self, key: KeyEvent) -> TuiCommand {
    match key.code {
        KeyCode::Up | KeyCode::Char('k') => {
            // Move the highlight up, stopping at the first entry.
            self.device_selected_index = self.device_selected_index.saturating_sub(1);
            TuiCommand::None
        }
        KeyCode::Down | KeyCode::Char('j') => {
            // Move the highlight down, stopping at the last entry.
            let next = self.device_selected_index + 1;
            if next < self.audio_devices.len() {
                self.device_selected_index = next;
            }
            TuiCommand::None
        }
        KeyCode::Enter => {
            // Build the command while the mode still records whether this
            // popup was for a mic or a speaker, then drop back to Editing.
            let cmd = if let Some(dev) = self.audio_devices.get(self.device_selected_index) {
                let node_name = dev.node_name.clone();
                let picked = if self.input_mode == InputMode::MicSelect {
                    TuiCommand::SelectMic(node_name)
                } else {
                    TuiCommand::SelectSpeaker(node_name)
                };
                self.audio_devices.clear();
                picked
            } else {
                TuiCommand::None
            };
            self.input_mode = InputMode::Editing;
            cmd
        }
        KeyCode::Esc | KeyCode::Char('q') => {
            // Cancel selection and drop the cached device list.
            self.input_mode = InputMode::Editing;
            self.audio_devices.clear();
            TuiCommand::None
        }
        _ => TuiCommand::None,
    }
}
}
/// Render the full application UI.
@@ -450,66 +340,6 @@ pub fn render(
connected,
&app.input_mode,
);
// Render device selection overlay if active
if app.input_mode == InputMode::MicSelect || app.input_mode == InputMode::SpeakerSelect {
render_device_overlay(frame, size, app);
}
}
/// Render the device selection overlay (centered popup).
///
/// Draws a bordered list of `app.audio_devices` centered in `area`,
/// highlighting the entry at `app.device_selected_index`. The title
/// reflects whether the user is picking a microphone or a speaker.
fn render_device_overlay(frame: &mut Frame, area: Rect, app: &App) {
    // Cap the popup at 60 columns and at the list size plus border chrome,
    // always leaving a small margin inside the available area.
    let popup_width = 60u16.min(area.width.saturating_sub(4));
    let popup_height = (app.audio_devices.len() as u16 + 4).min(area.height.saturating_sub(4));
    // Center the popup.
    let x = (area.width.saturating_sub(popup_width)) / 2;
    let y = (area.height.saturating_sub(popup_height)) / 2;
    let popup_area = Rect::new(x, y, popup_width, popup_height);

    // Clear the background
    frame.render_widget(Clear, popup_area);

    let title = if app.input_mode == InputMode::MicSelect {
        " 🎤 Select Microphone (↑↓ Enter Esc) "
    } else {
        " 🔊 Select Speaker (↑↓ Enter Esc) "
    };

    let block = Block::default()
        .title(title)
        .borders(Borders::ALL)
        .border_style(Style::default().fg(app.theme.input_border));
    let inner = block.inner(popup_area);
    frame.render_widget(block, popup_area);

    let items: Vec<ListItem> = app
        .audio_devices
        .iter()
        .enumerate()
        .map(|(i, dev)| {
            // NOTE(review): the selection-marker glyph appears to have been
            // lost in this rendering of the file — confirm against the repo.
            let marker = if i == app.device_selected_index {
                ""
            } else {
                " "
            };
            // Highlighted row: bold, self-name color; others: plain text color.
            let style = if i == app.device_selected_index {
                Style::default()
                    .fg(app.theme.self_name)
                    .add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(app.theme.text)
            };
            ListItem::new(Line::from(Span::styled(
                format!("{}{}", marker, dev.description),
                style,
            )))
        })
        .collect();

    let list = List::new(items);
    frame.render_widget(list, inner);
}
/// Deterministic random color for a peer info string.

View File

@@ -37,13 +37,6 @@ pub fn render(
" FILE ",
Style::default().fg(Color::Black).bg(Color::Yellow),
),
InputMode::MicSelect => Span::styled(
" MIC ",
Style::default().fg(Color::Black).bg(Color::Magenta),
),
InputMode::SpeakerSelect => {
Span::styled(" SPKR ", Style::default().fg(Color::Black).bg(Color::Cyan))
}
};
let line = Line::from(vec![

View File

@@ -99,7 +99,7 @@ async fn handle_socket(socket: WebSocket, state: AppState) {
kind,
data,
} => {
// 1 byte header (1=Camera, 2=Screen) + 1 byte ID len + ID bytes + MJPEG data
// 1 byte header (1=Camera, 2=Screen) + 1 byte ID len + ID bytes + WebP data
let header = match kind {
MediaKind::Camera => 1u8,
MediaKind::Screen => 2u8,
@@ -146,12 +146,12 @@ async fn handle_socket(socket: WebSocket, state: AppState) {
}
}
4 => {
// Camera (MJPEG)
// Camera (WebP)
tracing::debug!("Received camera frame: {} bytes", payload.len());
let _ = state.cam_tx.send(payload.to_vec());
}
5 => {
// Screen (MJPEG)
// Screen (WebP)
tracing::debug!("Received screen frame: {} bytes", payload.len());
let _ = state.screen_tx.send(payload.to_vec());
}

View File

@@ -189,8 +189,8 @@ mod config_tests {
#[test]
fn default_config_values() {
let config = AppConfig::default();
assert_eq!(config.media.screen_resolution, "1280x720");
assert!(config.media.mic_name.is_none());
// assert_eq!(config.media.screen_resolution, "1280x720");
// assert!(config.media.mic_name.is_none());
assert!(config.network.topic.is_none());
}
@@ -199,11 +199,11 @@ mod config_tests {
let config = AppConfig::default();
let toml_str = toml::to_string_pretty(&config).unwrap();
let parsed: AppConfig = toml::from_str(&toml_str).unwrap();
assert_eq!(
parsed.media.screen_resolution,
config.media.screen_resolution
);
assert_eq!(parsed.media.mic_name, config.media.mic_name);
// assert_eq!(
// parsed.media.screen_resolution,
// config.media.screen_resolution
// );
// assert_eq!(parsed.media.mic_name, config.media.mic_name);
assert_eq!(parsed.network.topic, config.network.topic);
}
@@ -214,7 +214,7 @@ mod config_tests {
screen_resolution = "1920x1080"
"#;
let config: AppConfig = toml::from_str(toml_str).unwrap();
assert_eq!(config.media.screen_resolution, "1920x1080");
// assert_eq!(config.media.screen_resolution, "1920x1080");
// network should use default
assert!(config.network.topic.is_none());
}
@@ -268,10 +268,9 @@ screen_resolution = "1920x1080"
fn theme_from_ui_config() {
let ui = UiConfig::default();
let theme: Theme = ui.into();
assert_eq!(theme.border, Color::Cyan);
assert_eq!(theme.chat_border, Color::Cyan);
assert_eq!(theme.text, Color::White);
assert_eq!(theme.self_name, Color::Green);
assert_eq!(theme.peer_name, Color::Magenta);
assert_eq!(theme.system_msg, Color::Yellow);
assert_eq!(theme.time, Color::DarkGray);
}
@@ -279,10 +278,8 @@ screen_resolution = "1920x1080"
#[test]
fn ui_config_defaults() {
let ui = UiConfig::default();
assert_eq!(ui.border, "cyan");
assert_eq!(ui.text, "white");
assert_eq!(ui.self_name, "green");
assert_eq!(ui.peer_name, "magenta");
assert_eq!(ui.system_msg, "yellow");
assert_eq!(ui.time, "dark_gray");
}

View File

@@ -166,7 +166,7 @@ function handleRemoteAudio(peer, arrayBuffer) {
function handleRemoteVideo(peer, arrayBuffer, type) {
const cardObj = getOrCreateCard(peer, type);
const blob = new Blob([arrayBuffer], { type: 'image/jpeg' });
const blob = new Blob([arrayBuffer], { type: 'image/webp' });
const url = URL.createObjectURL(blob);
const prevUrl = cardObj.imgElement.src;
@@ -361,7 +361,7 @@ function startVideoSender(stream, headerByte) {
}
};
reader.readAsArrayBuffer(blob);
}, 'image/jpeg', 0.6);
}, 'image/webp', 0.6);
}
setTimeout(sendFrame, 100); // 10 FPS
};