This commit is contained in:
mixa
2026-02-11 20:37:44 +03:00
commit 4f3517b97b
27 changed files with 12584 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
target

3
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"git.ignoreLimitWarning": true
}

7015
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

64
Cargo.toml Normal file
View File

@@ -0,0 +1,64 @@
[package]
name = "p2p-chat"
version = "0.1.0"
edition = "2021"
description = "A peer-to-peer chat application with file transfer, built on QUIC via iroh"

[dependencies]
# Networking
iroh = "0.96"
iroh-gossip = "0.96"
n0-future = "0.3"

# Async runtime
tokio = { version = "1", features = ["full"] }

# TUI
ratatui = "0.30"
crossterm = { version = "0.28", features = ["event-stream"] }

# Serialization
serde = { version = "1", features = ["derive"] }
serde_json = "1"
bincode = "1"
postcard = { version = "1", features = ["alloc"] }

# Utilities
bytes = "1"
sha2 = "0.10"
chrono = "0.4"
uuid = { version = "1", features = ["v4"] }
anyhow = "1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4", features = ["derive"] }
rand = "0.8"

# Configuration
toml = "0.7"
directories = "5.0"

# Media
pipewire = "0.9"
libspa = "0.9"
songbird = { version = "0.4", features = ["builtin-queue"] }
audiopus = "0.2"
crossbeam-channel = "0.5"

# Web interface
axum = { version = "0.8.8", features = ["ws"] }
tokio-stream = "0.1.18"
rust-embed = "8.11.0"
futures = "0.3.31"
tower-http = { version = "0.6.8", features = ["fs", "cors"] }
mime_guess = "2.0.5"

[profile.dev]
opt-level = 0
debug = true

[profile.release]
opt-level = 3      # Maximum optimization
lto = "fat"        # Full link-time optimization across all crates
codegen-units = 1  # Single codegen unit for best optimization (slower compile)
panic = "abort"    # Smaller binary, no unwinding overhead
strip = true       # Strip debug symbols from binary
overflow-checks = false

324
p2p-chat.log Normal file
View File

@@ -0,0 +1,324 @@
2026-02-11T12:16:43.446349Z INFO p2p_chat::web: Web interface listening on http://127.0.0.1:6969
2026-02-11T12:17:01.401544Z ERROR p2p_chat::media::voice: Voice sender web error: Failed to connect for media stream
2026-02-11T16:36:56.002043Z INFO p2p_chat::web: Web interface listening on http://127.0.0.1:6969
2026-02-11T16:37:08.175934Z INFO p2p_chat::web: Web interface listening on http://127.0.0.1:6970
2026-02-11T16:37:21.007537Z WARN iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5
2026-02-11T16:37:21.235476Z WARN gossip{me=7674721a58}: iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5
2026-02-11T16:37:26.129541Z ERROR p2p_chat::media::voice: Voice sender web error: Failed to connect for media stream
2026-02-11T16:37:26.132439Z WARN iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5
2026-02-11T16:37:26.134817Z INFO p2p_chat::media: Accepted Audio stream from PublicKey(7674721a58e203eac4b4b44ce74c941e457c1686f0c3822347847a14e1951b57)
2026-02-11T16:37:26.134910Z INFO p2p_chat::media::voice: Incoming voice stream started (web)
2026-02-11T16:38:39.256197Z DEBUG p2p_chat::web: Received screen frame: 222545 bytes
2026-02-11T16:38:39.384580Z DEBUG p2p_chat::web: Received screen frame: 222486 bytes
2026-02-11T16:38:39.509008Z DEBUG p2p_chat::web: Received screen frame: 222475 bytes
2026-02-11T16:38:39.634092Z DEBUG p2p_chat::web: Received screen frame: 222448 bytes
2026-02-11T16:38:39.760878Z DEBUG p2p_chat::web: Received screen frame: 222521 bytes
2026-02-11T16:38:39.890677Z DEBUG p2p_chat::web: Received screen frame: 226127 bytes
2026-02-11T16:38:40.024723Z DEBUG p2p_chat::web: Received screen frame: 227887 bytes
2026-02-11T16:38:40.156064Z DEBUG p2p_chat::web: Received screen frame: 223115 bytes
2026-02-11T16:38:40.283115Z DEBUG p2p_chat::web: Received screen frame: 223053 bytes
2026-02-11T16:38:40.408418Z DEBUG p2p_chat::web: Received screen frame: 223251 bytes
2026-02-11T16:38:40.517284Z DEBUG p2p_chat::web: Received screen frame: 223140 bytes
2026-02-11T16:38:40.625172Z DEBUG p2p_chat::web: Received screen frame: 223228 bytes
2026-02-11T16:38:40.731547Z DEBUG p2p_chat::web: Received screen frame: 223358 bytes
2026-02-11T16:38:40.835317Z DEBUG p2p_chat::web: Received screen frame: 223179 bytes
2026-02-11T16:38:40.943378Z DEBUG p2p_chat::web: Received screen frame: 223265 bytes
2026-02-11T16:38:41.049721Z DEBUG p2p_chat::web: Received screen frame: 223353 bytes
2026-02-11T16:38:41.156372Z DEBUG p2p_chat::web: Received screen frame: 223370 bytes
2026-02-11T16:38:41.262222Z DEBUG p2p_chat::web: Received screen frame: 223301 bytes
2026-02-11T16:38:41.367052Z DEBUG p2p_chat::web: Received screen frame: 223244 bytes
2026-02-11T16:38:41.474297Z DEBUG p2p_chat::web: Received screen frame: 223282 bytes
2026-02-11T16:38:41.581267Z DEBUG p2p_chat::web: Received screen frame: 222884 bytes
2026-02-11T16:38:41.689764Z DEBUG p2p_chat::web: Received screen frame: 222714 bytes
2026-02-11T16:38:41.795363Z DEBUG p2p_chat::web: Received screen frame: 222364 bytes
2026-02-11T16:38:41.902893Z DEBUG p2p_chat::web: Received screen frame: 223055 bytes
2026-02-11T16:38:42.011299Z DEBUG p2p_chat::web: Received screen frame: 223174 bytes
2026-02-11T16:38:42.116650Z DEBUG p2p_chat::web: Received screen frame: 223249 bytes
2026-02-11T16:38:42.221509Z DEBUG p2p_chat::web: Received screen frame: 223302 bytes
2026-02-11T16:38:42.325697Z DEBUG p2p_chat::web: Received screen frame: 223158 bytes
2026-02-11T16:38:42.430021Z DEBUG p2p_chat::web: Received screen frame: 223260 bytes
2026-02-11T16:38:42.535410Z DEBUG p2p_chat::web: Received screen frame: 223076 bytes
2026-02-11T16:38:42.644657Z DEBUG p2p_chat::web: Received screen frame: 223279 bytes
2026-02-11T16:38:42.752129Z DEBUG p2p_chat::web: Received screen frame: 224782 bytes
2026-02-11T16:38:42.856602Z DEBUG p2p_chat::web: Received screen frame: 224218 bytes
2026-02-11T16:38:42.961267Z DEBUG p2p_chat::web: Received screen frame: 224178 bytes
2026-02-11T16:38:43.067158Z DEBUG p2p_chat::web: Received screen frame: 224327 bytes
2026-02-11T16:38:43.171276Z DEBUG p2p_chat::web: Received screen frame: 224313 bytes
2026-02-11T16:38:43.275128Z DEBUG p2p_chat::web: Received screen frame: 224345 bytes
2026-02-11T16:38:43.381958Z DEBUG p2p_chat::web: Received screen frame: 224276 bytes
2026-02-11T16:38:43.485921Z DEBUG p2p_chat::web: Received screen frame: 224357 bytes
2026-02-11T16:38:43.596004Z DEBUG p2p_chat::web: Received screen frame: 224329 bytes
2026-02-11T16:38:43.704652Z DEBUG p2p_chat::web: Received screen frame: 224368 bytes
2026-02-11T16:38:43.813300Z DEBUG p2p_chat::web: Received screen frame: 224194 bytes
2026-02-11T16:38:43.921742Z DEBUG p2p_chat::web: Received screen frame: 224228 bytes
2026-02-11T16:38:44.030558Z DEBUG p2p_chat::web: Received screen frame: 224261 bytes
2026-02-11T16:38:44.135797Z DEBUG p2p_chat::web: Received screen frame: 224345 bytes
2026-02-11T16:38:44.245682Z DEBUG p2p_chat::web: Received screen frame: 225272 bytes
2026-02-11T16:38:44.354876Z DEBUG p2p_chat::web: Received screen frame: 223359 bytes
2026-02-11T16:38:44.463893Z DEBUG p2p_chat::web: Received screen frame: 223360 bytes
2026-02-11T16:38:44.574700Z DEBUG p2p_chat::web: Received screen frame: 223058 bytes
2026-02-11T16:38:44.684199Z DEBUG p2p_chat::web: Received screen frame: 224573 bytes
2026-02-11T16:38:44.791637Z DEBUG p2p_chat::web: Received screen frame: 225211 bytes
2026-02-11T16:38:44.899143Z DEBUG p2p_chat::web: Received screen frame: 223344 bytes
2026-02-11T16:38:45.005228Z DEBUG p2p_chat::web: Received screen frame: 225761 bytes
2026-02-11T16:38:45.110952Z DEBUG p2p_chat::web: Received screen frame: 224395 bytes
2026-02-11T16:38:45.217118Z DEBUG p2p_chat::web: Received screen frame: 225173 bytes
2026-02-11T16:38:45.322489Z DEBUG p2p_chat::web: Received screen frame: 223001 bytes
2026-02-11T16:38:45.428021Z DEBUG p2p_chat::web: Received screen frame: 223232 bytes
2026-02-11T16:38:45.533755Z DEBUG p2p_chat::web: Received screen frame: 223341 bytes
2026-02-11T16:38:45.637161Z DEBUG p2p_chat::web: Received screen frame: 223286 bytes
2026-02-11T16:38:45.741755Z DEBUG p2p_chat::web: Received screen frame: 223342 bytes
2026-02-11T16:38:45.846054Z DEBUG p2p_chat::web: Received screen frame: 223327 bytes
2026-02-11T16:38:45.954329Z DEBUG p2p_chat::web: Received screen frame: 223295 bytes
2026-02-11T16:38:46.061481Z DEBUG p2p_chat::web: Received screen frame: 223320 bytes
2026-02-11T16:38:46.167641Z DEBUG p2p_chat::web: Received screen frame: 223257 bytes
2026-02-11T16:38:46.275054Z DEBUG p2p_chat::web: Received screen frame: 227212 bytes
2026-02-11T16:38:46.379678Z DEBUG p2p_chat::web: Received screen frame: 223506 bytes
2026-02-11T16:38:46.484220Z DEBUG p2p_chat::web: Received screen frame: 223686 bytes
2026-02-11T16:38:46.590779Z DEBUG p2p_chat::web: Received screen frame: 223376 bytes
2026-02-11T16:38:46.698925Z DEBUG p2p_chat::web: Received screen frame: 223371 bytes
2026-02-11T16:38:46.805201Z DEBUG p2p_chat::web: Received screen frame: 223196 bytes
2026-02-11T16:38:46.912272Z DEBUG p2p_chat::web: Received screen frame: 223109 bytes
2026-02-11T16:38:47.018848Z DEBUG p2p_chat::web: Received screen frame: 223121 bytes
2026-02-11T16:38:47.124929Z DEBUG p2p_chat::web: Received screen frame: 223138 bytes
2026-02-11T16:38:47.232210Z DEBUG p2p_chat::web: Received screen frame: 223095 bytes
2026-02-11T16:38:47.338134Z DEBUG p2p_chat::web: Received screen frame: 223483 bytes
2026-02-11T16:38:47.446152Z DEBUG p2p_chat::web: Received screen frame: 223366 bytes
2026-02-11T16:38:47.551007Z DEBUG p2p_chat::web: Received screen frame: 223578 bytes
2026-02-11T16:38:47.656538Z DEBUG p2p_chat::web: Received screen frame: 223513 bytes
2026-02-11T16:38:47.766616Z DEBUG p2p_chat::web: Received screen frame: 223500 bytes
2026-02-11T16:38:47.873598Z DEBUG p2p_chat::web: Received screen frame: 223570 bytes
2026-02-11T16:38:47.982245Z DEBUG p2p_chat::web: Received screen frame: 223564 bytes
2026-02-11T16:38:48.090719Z DEBUG p2p_chat::web: Received screen frame: 223610 bytes
2026-02-11T16:38:48.200733Z DEBUG p2p_chat::web: Received screen frame: 222575 bytes
2026-02-11T16:38:48.308074Z DEBUG p2p_chat::web: Received screen frame: 222637 bytes
2026-02-11T16:38:48.420652Z DEBUG p2p_chat::web: Received screen frame: 222616 bytes
2026-02-11T16:38:48.532925Z DEBUG p2p_chat::web: Received screen frame: 227065 bytes
2026-02-11T16:38:48.645722Z DEBUG p2p_chat::web: Received screen frame: 227947 bytes
2026-02-11T16:38:48.757247Z DEBUG p2p_chat::web: Received screen frame: 228164 bytes
2026-02-11T16:38:48.868776Z DEBUG p2p_chat::web: Received screen frame: 228168 bytes
2026-02-11T16:38:48.979834Z DEBUG p2p_chat::web: Received screen frame: 228115 bytes
2026-02-11T16:38:49.091226Z DEBUG p2p_chat::web: Received screen frame: 228213 bytes
2026-02-11T16:38:49.202653Z DEBUG p2p_chat::web: Received screen frame: 228192 bytes
2026-02-11T16:38:49.311287Z DEBUG p2p_chat::web: Received screen frame: 228162 bytes
2026-02-11T16:38:49.421010Z DEBUG p2p_chat::web: Received screen frame: 228112 bytes
2026-02-11T16:38:49.529169Z DEBUG p2p_chat::web: Received screen frame: 228200 bytes
2026-02-11T16:38:49.638545Z DEBUG p2p_chat::web: Received screen frame: 228172 bytes
2026-02-11T16:38:49.748120Z DEBUG p2p_chat::web: Received screen frame: 228248 bytes
2026-02-11T16:38:49.858933Z DEBUG p2p_chat::web: Received screen frame: 228258 bytes
2026-02-11T16:38:49.967826Z DEBUG p2p_chat::web: Received screen frame: 228445 bytes
2026-02-11T16:38:50.078356Z DEBUG p2p_chat::web: Received screen frame: 228484 bytes
2026-02-11T16:38:50.190910Z DEBUG p2p_chat::web: Received screen frame: 228473 bytes
2026-02-11T16:38:50.298529Z DEBUG p2p_chat::web: Received screen frame: 228502 bytes
2026-02-11T16:38:50.408287Z DEBUG p2p_chat::web: Received screen frame: 228482 bytes
2026-02-11T16:38:50.516729Z DEBUG p2p_chat::web: Received screen frame: 228508 bytes
2026-02-11T16:38:50.624651Z DEBUG p2p_chat::web: Received screen frame: 228371 bytes
2026-02-11T16:38:50.732204Z DEBUG p2p_chat::web: Received screen frame: 228398 bytes
2026-02-11T16:38:50.839703Z DEBUG p2p_chat::web: Received screen frame: 228451 bytes
2026-02-11T16:38:50.950365Z DEBUG p2p_chat::web: Received screen frame: 228052 bytes
2026-02-11T16:38:51.060473Z DEBUG p2p_chat::web: Received screen frame: 233933 bytes
2026-02-11T16:38:51.171401Z DEBUG p2p_chat::web: Received screen frame: 237132 bytes
2026-02-11T16:38:51.280639Z DEBUG p2p_chat::web: Received screen frame: 237284 bytes
2026-02-11T16:38:51.392789Z DEBUG p2p_chat::web: Received screen frame: 230703 bytes
2026-02-11T16:38:51.501068Z DEBUG p2p_chat::web: Received screen frame: 228509 bytes
2026-02-11T16:38:51.607607Z DEBUG p2p_chat::web: Received screen frame: 228483 bytes
2026-02-11T16:38:51.712440Z DEBUG p2p_chat::web: Received screen frame: 228551 bytes
2026-02-11T16:38:51.824267Z DEBUG p2p_chat::web: Received screen frame: 227428 bytes
2026-02-11T16:38:51.934584Z DEBUG p2p_chat::web: Received screen frame: 223474 bytes
2026-02-11T16:38:52.038889Z DEBUG p2p_chat::web: Received screen frame: 223377 bytes
2026-02-11T16:38:52.147456Z DEBUG p2p_chat::web: Received screen frame: 221754 bytes
2026-02-11T16:38:52.256093Z DEBUG p2p_chat::web: Received screen frame: 224925 bytes
2026-02-11T16:38:52.371056Z DEBUG p2p_chat::web: Received screen frame: 224316 bytes
2026-02-11T16:38:52.478515Z DEBUG p2p_chat::web: Received screen frame: 224235 bytes
2026-02-11T16:38:52.587715Z DEBUG p2p_chat::web: Received screen frame: 224444 bytes
2026-02-11T16:38:52.694790Z DEBUG p2p_chat::web: Received screen frame: 224315 bytes
2026-02-11T16:38:52.803360Z DEBUG p2p_chat::web: Received screen frame: 224471 bytes
2026-02-11T16:38:52.913055Z DEBUG p2p_chat::web: Received screen frame: 223702 bytes
2026-02-11T16:38:53.020693Z DEBUG p2p_chat::web: Received screen frame: 223369 bytes
2026-02-11T16:38:53.128885Z DEBUG p2p_chat::web: Received screen frame: 223241 bytes
2026-02-11T16:38:53.233586Z DEBUG p2p_chat::web: Received screen frame: 223139 bytes
2026-02-11T16:38:53.339499Z DEBUG p2p_chat::web: Received screen frame: 225770 bytes
2026-02-11T16:38:53.443997Z DEBUG p2p_chat::web: Received screen frame: 224180 bytes
2026-02-11T16:38:53.548434Z DEBUG p2p_chat::web: Received screen frame: 224226 bytes
2026-02-11T16:38:53.652763Z DEBUG p2p_chat::web: Received screen frame: 224160 bytes
2026-02-11T16:38:53.756895Z DEBUG p2p_chat::web: Received screen frame: 224134 bytes
2026-02-11T16:38:53.860821Z DEBUG p2p_chat::web: Received screen frame: 224146 bytes
2026-02-11T16:38:53.966535Z DEBUG p2p_chat::web: Received screen frame: 224138 bytes
2026-02-11T16:38:54.071237Z DEBUG p2p_chat::web: Received screen frame: 224233 bytes
2026-02-11T16:38:54.179349Z DEBUG p2p_chat::web: Received screen frame: 224316 bytes
2026-02-11T16:38:54.283245Z DEBUG p2p_chat::web: Received screen frame: 224213 bytes
2026-02-11T16:38:54.390859Z DEBUG p2p_chat::web: Received screen frame: 224262 bytes
2026-02-11T16:38:54.499830Z DEBUG p2p_chat::web: Received screen frame: 224254 bytes
2026-02-11T16:38:54.602734Z DEBUG p2p_chat::web: Received screen frame: 224190 bytes
2026-02-11T16:38:54.709369Z DEBUG p2p_chat::web: Received screen frame: 224727 bytes
2026-02-11T16:38:54.812778Z DEBUG p2p_chat::web: Received screen frame: 223280 bytes
2026-02-11T16:38:54.917755Z DEBUG p2p_chat::web: Received screen frame: 223293 bytes
2026-02-11T16:38:55.022347Z DEBUG p2p_chat::web: Received screen frame: 223164 bytes
2026-02-11T16:38:55.127888Z DEBUG p2p_chat::web: Received screen frame: 223305 bytes
2026-02-11T16:38:55.232778Z DEBUG p2p_chat::web: Received screen frame: 223270 bytes
2026-02-11T16:38:55.336328Z DEBUG p2p_chat::web: Received screen frame: 223192 bytes
2026-02-11T16:38:55.440921Z DEBUG p2p_chat::web: Received screen frame: 223254 bytes
2026-02-11T16:38:55.544606Z DEBUG p2p_chat::web: Received screen frame: 223275 bytes
2026-02-11T16:38:55.648711Z DEBUG p2p_chat::web: Received screen frame: 223226 bytes
2026-02-11T16:38:55.754063Z DEBUG p2p_chat::web: Received screen frame: 223143 bytes
2026-02-11T16:38:55.862396Z DEBUG p2p_chat::web: Received screen frame: 223255 bytes
2026-02-11T16:38:55.966488Z DEBUG p2p_chat::web: Received screen frame: 227921 bytes
2026-02-11T16:38:56.075393Z DEBUG p2p_chat::web: Received screen frame: 228623 bytes
2026-02-11T16:38:56.179639Z DEBUG p2p_chat::web: Received screen frame: 228803 bytes
2026-02-11T16:38:56.283775Z DEBUG p2p_chat::web: Received screen frame: 228904 bytes
2026-02-11T16:38:56.390177Z DEBUG p2p_chat::web: Received screen frame: 228838 bytes
2026-02-11T16:38:56.497795Z DEBUG p2p_chat::web: Received screen frame: 228918 bytes
2026-02-11T16:38:56.605318Z DEBUG p2p_chat::web: Received screen frame: 223439 bytes
2026-02-11T16:38:56.709567Z DEBUG p2p_chat::web: Received screen frame: 223461 bytes
2026-02-11T16:38:56.817819Z DEBUG p2p_chat::web: Received screen frame: 223306 bytes
2026-02-11T16:38:56.928778Z DEBUG p2p_chat::web: Received screen frame: 223303 bytes
2026-02-11T16:38:57.047827Z DEBUG p2p_chat::web: Received screen frame: 222813 bytes
2026-02-11T16:38:57.152119Z DEBUG p2p_chat::web: Received screen frame: 220986 bytes
2026-02-11T16:38:57.257610Z DEBUG p2p_chat::web: Received screen frame: 220399 bytes
2026-02-11T16:38:57.370956Z DEBUG p2p_chat::web: Received screen frame: 220273 bytes
2026-02-11T16:38:57.478844Z DEBUG p2p_chat::web: Received screen frame: 220062 bytes
2026-02-11T16:38:57.584093Z DEBUG p2p_chat::web: Received screen frame: 220201 bytes
2026-02-11T16:38:57.689024Z DEBUG p2p_chat::web: Received screen frame: 220054 bytes
2026-02-11T16:38:57.795894Z DEBUG p2p_chat::web: Received screen frame: 220189 bytes
2026-02-11T16:38:57.901412Z DEBUG p2p_chat::web: Received screen frame: 220246 bytes
2026-02-11T16:38:58.011714Z DEBUG p2p_chat::web: Received screen frame: 220235 bytes
2026-02-11T16:38:58.120831Z DEBUG p2p_chat::web: Received screen frame: 220235 bytes
2026-02-11T16:38:58.228052Z DEBUG p2p_chat::web: Received screen frame: 220235 bytes
2026-02-11T16:38:58.333874Z DEBUG p2p_chat::web: Received screen frame: 220334 bytes
2026-02-11T16:38:58.439102Z DEBUG p2p_chat::web: Received screen frame: 220329 bytes
2026-02-11T16:38:58.543380Z DEBUG p2p_chat::web: Received screen frame: 220254 bytes
2026-02-11T16:38:58.652113Z DEBUG p2p_chat::web: Received screen frame: 220271 bytes
2026-02-11T16:38:58.760926Z DEBUG p2p_chat::web: Received screen frame: 220271 bytes
2026-02-11T16:38:58.870651Z DEBUG p2p_chat::web: Received screen frame: 220271 bytes
2026-02-11T16:38:58.976952Z DEBUG p2p_chat::web: Received screen frame: 220271 bytes
2026-02-11T16:38:59.086685Z DEBUG p2p_chat::web: Received screen frame: 221787 bytes
2026-02-11T16:38:59.190153Z DEBUG p2p_chat::web: Received screen frame: 226379 bytes
2026-02-11T16:38:59.295261Z DEBUG p2p_chat::web: Received screen frame: 228569 bytes
2026-02-11T16:38:59.404905Z DEBUG p2p_chat::web: Received screen frame: 228835 bytes
2026-02-11T16:38:59.511518Z DEBUG p2p_chat::web: Received screen frame: 229108 bytes
2026-02-11T16:38:59.617118Z DEBUG p2p_chat::web: Received screen frame: 226242 bytes
2026-02-11T16:38:59.724048Z DEBUG p2p_chat::web: Received screen frame: 223787 bytes
2026-02-11T16:38:59.831684Z DEBUG p2p_chat::web: Received screen frame: 223500 bytes
2026-02-11T16:38:59.939542Z DEBUG p2p_chat::web: Received screen frame: 223421 bytes
2026-02-11T16:39:00.047819Z DEBUG p2p_chat::web: Received screen frame: 223416 bytes
2026-02-11T16:39:00.156037Z DEBUG p2p_chat::web: Received screen frame: 223364 bytes
2026-02-11T16:39:00.270226Z DEBUG p2p_chat::web: Received screen frame: 222894 bytes
2026-02-11T16:39:00.374427Z DEBUG p2p_chat::web: Received screen frame: 222680 bytes
2026-02-11T16:39:00.479065Z DEBUG p2p_chat::web: Received screen frame: 222787 bytes
2026-02-11T16:39:00.585488Z DEBUG p2p_chat::web: Received screen frame: 222662 bytes
2026-02-11T16:39:00.690629Z DEBUG p2p_chat::web: Received screen frame: 222649 bytes
2026-02-11T16:39:00.796456Z DEBUG p2p_chat::web: Received screen frame: 222761 bytes
2026-02-11T16:39:00.906250Z DEBUG p2p_chat::web: Received screen frame: 222722 bytes
2026-02-11T16:39:01.010822Z DEBUG p2p_chat::web: Received screen frame: 222634 bytes
2026-02-11T16:39:01.116146Z DEBUG p2p_chat::web: Received screen frame: 222649 bytes
2026-02-11T16:39:01.222085Z DEBUG p2p_chat::web: Received screen frame: 222517 bytes
2026-02-11T16:39:05.397912Z ERROR p2p_chat::media::capture: Video sender web error: Failed to connect for media stream
2026-02-11T16:39:05.400399Z WARN iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5
2026-02-11T16:39:05.402559Z INFO p2p_chat::media: Accepted Video stream from PublicKey(7674721a58e203eac4b4b44ce74c941e457c1686f0c3822347847a14e1951b57)
2026-02-11T16:39:05.402602Z INFO p2p_chat::media::capture: Starting Screen playback via Web Broadcast
2026-02-11T16:39:10.031877Z DEBUG p2p_chat::web: Received screen frame: 224143 bytes
2026-02-11T16:39:10.163878Z DEBUG p2p_chat::web: Received screen frame: 238165 bytes
2026-02-11T16:39:10.290992Z DEBUG p2p_chat::web: Received screen frame: 239833 bytes
2026-02-11T16:39:10.417287Z DEBUG p2p_chat::web: Received screen frame: 239899 bytes
2026-02-11T16:39:10.550698Z DEBUG p2p_chat::web: Received screen frame: 240068 bytes
2026-02-11T16:39:10.681492Z DEBUG p2p_chat::web: Received screen frame: 240501 bytes
2026-02-11T16:39:10.811916Z DEBUG p2p_chat::web: Received screen frame: 240964 bytes
2026-02-11T16:39:10.943473Z DEBUG p2p_chat::web: Received screen frame: 240948 bytes
2026-02-11T16:39:11.072369Z DEBUG p2p_chat::web: Received screen frame: 240886 bytes
2026-02-11T16:39:11.202679Z DEBUG p2p_chat::web: Received screen frame: 241000 bytes
2026-02-11T16:39:11.314576Z DEBUG p2p_chat::web: Received screen frame: 240971 bytes
2026-02-11T16:39:11.422556Z DEBUG p2p_chat::web: Received screen frame: 240887 bytes
2026-02-11T16:39:11.529221Z DEBUG p2p_chat::web: Received screen frame: 241007 bytes
2026-02-11T16:39:11.637948Z DEBUG p2p_chat::web: Received screen frame: 240946 bytes
2026-02-11T16:39:11.747607Z DEBUG p2p_chat::web: Received screen frame: 240991 bytes
2026-02-11T16:39:11.855580Z DEBUG p2p_chat::web: Received screen frame: 240964 bytes
2026-02-11T16:39:11.964208Z DEBUG p2p_chat::web: Received screen frame: 240885 bytes
2026-02-11T16:39:12.074572Z DEBUG p2p_chat::web: Received screen frame: 240821 bytes
2026-02-11T16:39:12.183808Z DEBUG p2p_chat::web: Received screen frame: 205533 bytes
2026-02-11T16:39:12.289266Z DEBUG p2p_chat::web: Received screen frame: 183402 bytes
2026-02-11T16:39:12.398940Z DEBUG p2p_chat::web: Received screen frame: 173981 bytes
2026-02-11T16:39:12.510199Z DEBUG p2p_chat::web: Received screen frame: 188848 bytes
2026-02-11T16:39:12.617482Z DEBUG p2p_chat::web: Received screen frame: 193525 bytes
2026-02-11T16:39:12.727529Z DEBUG p2p_chat::web: Received screen frame: 194740 bytes
2026-02-11T16:39:12.837601Z DEBUG p2p_chat::web: Received screen frame: 195227 bytes
2026-02-11T16:39:12.947246Z DEBUG p2p_chat::web: Received screen frame: 195045 bytes
2026-02-11T16:39:13.057826Z DEBUG p2p_chat::web: Received screen frame: 197200 bytes
2026-02-11T16:39:13.167514Z DEBUG p2p_chat::web: Received screen frame: 197880 bytes
2026-02-11T16:39:13.276403Z DEBUG p2p_chat::web: Received screen frame: 196031 bytes
2026-02-11T16:39:13.391202Z DEBUG p2p_chat::web: Received screen frame: 195397 bytes
2026-02-11T16:39:13.501947Z DEBUG p2p_chat::web: Received screen frame: 195887 bytes
2026-02-11T16:39:13.612095Z DEBUG p2p_chat::web: Received screen frame: 195327 bytes
2026-02-11T16:39:13.723780Z DEBUG p2p_chat::web: Received screen frame: 194351 bytes
2026-02-11T16:39:13.833604Z DEBUG p2p_chat::web: Received screen frame: 194112 bytes
2026-02-11T16:39:13.945154Z DEBUG p2p_chat::web: Received screen frame: 194037 bytes
2026-02-11T16:39:14.055624Z DEBUG p2p_chat::web: Received screen frame: 194300 bytes
2026-02-11T16:39:14.167014Z DEBUG p2p_chat::web: Received screen frame: 194850 bytes
2026-02-11T16:39:14.276563Z DEBUG p2p_chat::web: Received screen frame: 195111 bytes
2026-02-11T16:39:14.387088Z DEBUG p2p_chat::web: Received screen frame: 195110 bytes
2026-02-11T16:39:14.497698Z DEBUG p2p_chat::web: Received screen frame: 195130 bytes
2026-02-11T16:39:14.607940Z DEBUG p2p_chat::web: Received screen frame: 195102 bytes
2026-02-11T16:39:14.719830Z DEBUG p2p_chat::web: Received screen frame: 194979 bytes
2026-02-11T16:39:14.824272Z DEBUG p2p_chat::web: Received screen frame: 196605 bytes
2026-02-11T16:39:14.928138Z DEBUG p2p_chat::web: Received screen frame: 196693 bytes
2026-02-11T16:39:15.031800Z DEBUG p2p_chat::web: Received screen frame: 196729 bytes
2026-02-11T16:39:15.136021Z DEBUG p2p_chat::web: Received screen frame: 196773 bytes
2026-02-11T16:39:15.240535Z DEBUG p2p_chat::web: Received screen frame: 196762 bytes
2026-02-11T16:39:15.343535Z DEBUG p2p_chat::web: Received screen frame: 196796 bytes
2026-02-11T16:39:15.447840Z DEBUG p2p_chat::web: Received screen frame: 196740 bytes
2026-02-11T16:39:15.556055Z DEBUG p2p_chat::web: Received screen frame: 196648 bytes
2026-02-11T16:39:15.658166Z DEBUG p2p_chat::web: Received screen frame: 196665 bytes
2026-02-11T16:39:15.762218Z DEBUG p2p_chat::web: Received screen frame: 196719 bytes
2026-02-11T16:39:15.865760Z DEBUG p2p_chat::web: Received screen frame: 196807 bytes
2026-02-11T16:39:15.970619Z DEBUG p2p_chat::web: Received screen frame: 196632 bytes
2026-02-11T16:39:16.078141Z DEBUG p2p_chat::web: Received screen frame: 196625 bytes
2026-02-11T16:39:16.182434Z DEBUG p2p_chat::web: Received screen frame: 196632 bytes
2026-02-11T16:39:16.286185Z DEBUG p2p_chat::web: Received screen frame: 196756 bytes
2026-02-11T16:39:16.390383Z DEBUG p2p_chat::web: Received screen frame: 196670 bytes
2026-02-11T16:39:16.493955Z DEBUG p2p_chat::web: Received screen frame: 196580 bytes
2026-02-11T16:39:16.597849Z DEBUG p2p_chat::web: Received screen frame: 196640 bytes
2026-02-11T16:39:16.701687Z DEBUG p2p_chat::web: Received screen frame: 196640 bytes
2026-02-11T16:39:16.805453Z DEBUG p2p_chat::web: Received screen frame: 196816 bytes
2026-02-11T16:39:16.909458Z DEBUG p2p_chat::web: Received screen frame: 196678 bytes
2026-02-11T16:39:17.013447Z DEBUG p2p_chat::web: Received screen frame: 196690 bytes
2026-02-11T16:39:17.117657Z DEBUG p2p_chat::web: Received screen frame: 196706 bytes
2026-02-11T16:39:17.221270Z DEBUG p2p_chat::web: Received screen frame: 196670 bytes
2026-02-11T16:39:17.326489Z DEBUG p2p_chat::web: Received screen frame: 196790 bytes
2026-02-11T16:39:17.430832Z DEBUG p2p_chat::web: Received screen frame: 196646 bytes
2026-02-11T16:39:17.533849Z DEBUG p2p_chat::web: Received screen frame: 196761 bytes
2026-02-11T16:39:17.637874Z DEBUG p2p_chat::web: Received screen frame: 196659 bytes
2026-02-11T16:39:17.741621Z DEBUG p2p_chat::web: Received screen frame: 196620 bytes
2026-02-11T16:39:17.846016Z DEBUG p2p_chat::web: Received screen frame: 196572 bytes
2026-02-11T16:39:17.950199Z DEBUG p2p_chat::web: Received screen frame: 196698 bytes
2026-02-11T16:39:18.030957Z WARN gossip{me=7674721a58}: iroh_quinn_proto::connection: sent PATH_ABANDON after path was already discarded
2026-02-11T16:39:18.053172Z DEBUG p2p_chat::web: Received screen frame: 196633 bytes
2026-02-11T16:39:18.158287Z DEBUG p2p_chat::web: Received screen frame: 196661 bytes
2026-02-11T16:39:18.262001Z DEBUG p2p_chat::web: Received screen frame: 196633 bytes
2026-02-11T16:39:18.365864Z DEBUG p2p_chat::web: Received screen frame: 196644 bytes
2026-02-11T16:39:18.470708Z DEBUG p2p_chat::web: Received screen frame: 196648 bytes
2026-02-11T16:39:18.574270Z DEBUG p2p_chat::web: Received screen frame: 196553 bytes
2026-02-11T16:39:18.678087Z DEBUG p2p_chat::web: Received screen frame: 196625 bytes
2026-02-11T16:39:18.782939Z DEBUG p2p_chat::web: Received screen frame: 196716 bytes
2026-02-11T16:39:18.886037Z DEBUG p2p_chat::web: Received screen frame: 196731 bytes
2026-02-11T16:39:18.990368Z DEBUG p2p_chat::web: Received screen frame: 196602 bytes
2026-02-11T16:39:19.095809Z DEBUG p2p_chat::web: Received screen frame: 196629 bytes
2026-02-11T16:39:19.200459Z DEBUG p2p_chat::web: Received screen frame: 195918 bytes
2026-02-11T16:39:19.305117Z DEBUG p2p_chat::web: Received screen frame: 194321 bytes
2026-02-11T16:39:19.409039Z DEBUG p2p_chat::web: Received screen frame: 195261 bytes
2026-02-11T16:39:19.512643Z DEBUG p2p_chat::web: Received screen frame: 195586 bytes
2026-02-11T16:39:19.616201Z DEBUG p2p_chat::web: Received screen frame: 195655 bytes
2026-02-11T16:39:19.720491Z DEBUG p2p_chat::web: Received screen frame: 195665 bytes
2026-02-11T16:39:19.824103Z DEBUG p2p_chat::web: Received screen frame: 195673 bytes
2026-02-11T16:39:19.928319Z DEBUG p2p_chat::web: Received screen frame: 195640 bytes
2026-02-11T16:39:20.032662Z DEBUG p2p_chat::web: Received screen frame: 195635 bytes
2026-02-11T16:39:20.136998Z DEBUG p2p_chat::web: Received screen frame: 195668 bytes
2026-02-11T16:39:20.241713Z DEBUG p2p_chat::web: Received screen frame: 195635 bytes
2026-02-11T16:39:20.346111Z DEBUG p2p_chat::web: Received screen frame: 195630 bytes
2026-02-11T16:39:20.450563Z DEBUG p2p_chat::web: Received screen frame: 195638 bytes
2026-02-11T16:39:20.557656Z DEBUG p2p_chat::web: Received screen frame: 195622 bytes
2026-02-11T16:39:34.425339Z ERROR p2p_chat::media::voice: Voice sender web error: Failed to connect for media stream
2026-02-11T16:39:34.428904Z WARN iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5
2026-02-11T16:39:34.431196Z INFO p2p_chat::media: Accepted Audio stream from PublicKey(69c0253ed357440bf107a5eab12d790b0fe04346dfd4cf38cc4dc5be8ea60d7f)
2026-02-11T16:39:34.431287Z INFO p2p_chat::media::voice: Incoming voice stream started (web)
2026-02-11T17:17:42.956205Z INFO p2p_chat::web: Web interface listening on http://127.0.0.1:6969
2026-02-11T17:18:06.892422Z WARN p2p_chat::media::voice: Failed to open voice stream to 5b21d1120a04f6c9647ab3354ad75ce305f4f4dc3b5cc9cb93eeae70778cd12f: Failed to connect for media stream
2026-02-11T17:18:06.893138Z WARN iroh_quinn_proto::connection: remote server configuration might cause nat traversal issues max_local_addresses=12 remote_cid_limit=5

88
src/chat/mod.rs Normal file
View File

@@ -0,0 +1,88 @@
//! Chat module — manages chat history and message sending/receiving.
use crate::protocol::{ChatMessage, GossipMessage};
use crate::net::NetworkManager;
use anyhow::Result;
/// Stored chat entry with display metadata.
///
/// One element of `ChatState::history`; the `is_self` / `is_system`
/// flags let the UI layer decide how to render the line.
#[derive(Debug, Clone)]
pub struct ChatEntry {
    /// Display name of the sender ("SYSTEM" for system notices).
    pub sender_name: String,
    /// Unix timestamp in milliseconds (`chrono::Utc::now().timestamp_millis()`).
    pub timestamp: u64,
    /// Message body.
    pub text: String,
    /// True when the local user sent this entry.
    pub is_self: bool,
    /// True for system notices (peer join/leave, file events, etc.).
    pub is_system: bool,
}
/// Chat state: in-memory history and configuration.
pub struct ChatState {
    /// Message history in arrival order (oldest first); trimmed so it
    /// never exceeds `max_history` entries.
    pub history: Vec<ChatEntry>,
    /// Display name used for messages sent by the local user.
    pub our_name: String,
    /// Maximum number of entries retained in `history`.
    pub max_history: usize,
}
impl ChatState {
    /// Create an empty chat state for the local user `our_name`,
    /// retaining at most 10,000 history entries.
    pub fn new(our_name: String) -> Self {
        Self {
            history: Vec::new(),
            our_name,
            max_history: 10_000,
        }
    }

    /// Send a chat message via gossip broadcast, then record it in the
    /// local history. If the broadcast fails, the history is left
    /// untouched and the error is propagated.
    pub async fn send_message(&mut self, text: String, net: &NetworkManager) -> Result<()> {
        let sent_at = Self::now_millis();
        let outgoing = ChatMessage {
            sender_name: self.our_name.clone(),
            timestamp: sent_at,
            text: text.clone(),
        };
        // Broadcast first; only a successful send reaches the history.
        net.broadcast(&GossipMessage::Chat(outgoing)).await?;
        self.add_entry(ChatEntry {
            sender_name: self.our_name.clone(),
            timestamp: sent_at,
            text,
            is_self: true,
            is_system: false,
        });
        Ok(())
    }

    /// Record a chat message received from a remote peer via gossip.
    pub fn receive_message(&mut self, msg: ChatMessage) {
        let ChatMessage { sender_name, timestamp, text } = msg;
        self.add_entry(ChatEntry {
            sender_name,
            timestamp,
            text,
            is_self: false,
            is_system: false,
        });
    }

    /// Record a system notice (peer join/leave, file events, etc.)
    /// timestamped with the current wall-clock time.
    pub fn add_system_message(&mut self, text: String) {
        self.add_entry(ChatEntry {
            sender_name: String::from("SYSTEM"),
            timestamp: Self::now_millis(),
            text,
            is_self: false,
            is_system: true,
        });
    }

    /// Current wall-clock time as Unix milliseconds.
    fn now_millis() -> u64 {
        chrono::Utc::now().timestamp_millis() as u64
    }

    /// Append an entry, dropping the oldest entries once `max_history`
    /// is exceeded.
    fn add_entry(&mut self, entry: ChatEntry) {
        self.history.push(entry);
        let excess = self.history.len().saturating_sub(self.max_history);
        if excess > 0 {
            self.history.drain(..excess);
        }
    }
}

194
src/config.rs Normal file
View File

@@ -0,0 +1,194 @@
use std::path::PathBuf;
use std::fs;
use anyhow::Result;
use directories::ProjectDirs;
use serde::{Deserialize, Serialize};
use ratatui::style::Color;
/// Top-level application configuration, persisted as TOML in the
/// platform config directory (see `AppConfig::load` / `AppConfig::save`).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AppConfig {
    /// Networking options (gossip topic).
    #[serde(default)]
    pub network: NetworkConfig,
    /// Media capture/playback options.
    #[serde(default)]
    pub media: MediaConfig,
    /// TUI color theme as user-supplied color strings.
    #[serde(default)]
    pub ui: UiConfig,
}
impl Default for AppConfig {
fn default() -> Self {
Self {
network: NetworkConfig::default(),
media: MediaConfig::default(),
ui: UiConfig::default(),
}
}
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct NetworkConfig {
    /// Optional default gossip topic (room name or hex string).
    /// `None` means the CLI flag or built-in default is used instead.
    pub topic: Option<String>,
}
impl Default for NetworkConfig {
    fn default() -> Self {
        // No topic configured by default; callers fall back to CLI/default.
        Self { topic: None }
    }
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MediaConfig {
    /// Screen-share resolution as "WIDTHxHEIGHT", e.g. "1280x720".
    pub screen_resolution: String,
    /// PipeWire node name of the microphone; `None` selects the default device.
    pub mic_name: Option<String>,
    /// PipeWire node name of the speaker; `None` selects the default device.
    pub speaker_name: Option<String>,
    /// Microphone bitrate in bits per second.
    #[serde(default = "default_bitrate")]
    pub mic_bitrate: u32,
}
/// Serde fallback for `MediaConfig::mic_bitrate`: 128 kbit/s.
fn default_bitrate() -> u32 {
    128_000
}
impl Default for MediaConfig {
fn default() -> Self {
Self {
screen_resolution: "1280x720".to_string(),
mic_name: None,
speaker_name: None,
mic_bitrate: 128000,
}
}
}
/// TUI color names; each value is interpreted by `parse_color`
/// (named terminal color or "#RGB"/"#RRGGBB" hex).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UiConfig {
    /// Window/border color.
    pub border: String,
    /// Default text color.
    pub text: String,
    /// Color for our own nickname.
    pub self_name: String,
    /// Color for remote peers' nicknames.
    pub peer_name: String,
    /// Color for system messages.
    pub system_msg: String,
    /// Color for timestamps.
    pub time: String,
}
impl Default for UiConfig {
    /// Classic terminal palette: cyan chrome, green for self, magenta for peers.
    fn default() -> Self {
        Self {
            border: String::from("cyan"),
            text: String::from("white"),
            self_name: String::from("green"),
            peer_name: String::from("magenta"),
            system_msg: String::from("yellow"),
            time: String::from("dark_gray"),
        }
    }
}
impl AppConfig {
    /// Load the configuration from disk, writing (and returning) the
    /// defaults when no config file exists yet.
    pub fn load() -> Result<Self> {
        let path = Self::get_config_path();
        if path.exists() {
            let content = fs::read_to_string(&path)?;
            let config: AppConfig = toml::from_str(&content)?;
            return Ok(config);
        }
        // First run: persist the defaults so the user has a file to edit.
        let defaults = Self::default();
        if let Some(dir) = path.parent() {
            fs::create_dir_all(dir)?;
        }
        let serialized = toml::to_string_pretty(&defaults)?;
        fs::write(&path, serialized)?;
        Ok(defaults)
    }

    /// Platform config location (e.g. ~/.config/p2p-chat/config.toml),
    /// falling back to the working directory when it cannot be determined.
    fn get_config_path() -> PathBuf {
        match ProjectDirs::from("com", "p2p-chat", "p2p-chat") {
            Some(dirs) => dirs.config_dir().join("config.toml"),
            None => PathBuf::from("config.toml"),
        }
    }

    /// Save the current configuration to disk.
    pub fn save(&self) -> Result<()> {
        let path = Self::get_config_path();
        if let Some(dir) = path.parent() {
            fs::create_dir_all(dir)?;
        }
        let serialized = toml::to_string_pretty(self)?;
        fs::write(&path, serialized)?;
        Ok(())
    }
}
/// Parse a user-supplied color string into a ratatui `Color`.
///
/// Accepts `#RGB` / `#RRGGBB` hex notation or a standard terminal color
/// name (case-insensitive). Unrecognized input falls back to `Color::White`.
pub fn parse_color(color_str: &str) -> Color {
    if let Some(hex) = color_str.strip_prefix('#') {
        // Only 3- and 6-digit hex is valid. Previously any length went
        // through the #RRGGBB math, so e.g. "#ffff" silently produced a
        // wrong color instead of falling back. Reject bad lengths up front.
        if hex.len() == 3 || hex.len() == 6 {
            if let Ok(val) = u32::from_str_radix(hex, 16) {
                let (r, g, b) = if hex.len() == 3 {
                    // #RGB -> #RRGGBB: duplicate each nibble (0xF * 17 = 0xFF).
                    (
                        (((val >> 8) & 0xF) * 17) as u8,
                        (((val >> 4) & 0xF) * 17) as u8,
                        ((val & 0xF) * 17) as u8,
                    )
                } else {
                    // #RRGGBB
                    (
                        ((val >> 16) & 0xFF) as u8,
                        ((val >> 8) & 0xFF) as u8,
                        (val & 0xFF) as u8,
                    )
                };
                return Color::Rgb(r, g, b);
            }
        }
    }
    match color_str.to_lowercase().as_str() {
        "black" => Color::Black,
        "red" => Color::Red,
        "green" => Color::Green,
        "yellow" => Color::Yellow,
        "blue" => Color::Blue,
        "magenta" => Color::Magenta,
        "cyan" => Color::Cyan,
        "gray" => Color::Gray,
        "dark_gray" | "darkgray" => Color::DarkGray,
        "light_red" | "lightred" => Color::LightRed,
        "light_green" | "lightgreen" => Color::LightGreen,
        "light_yellow" | "lightyellow" => Color::LightYellow,
        "light_blue" | "lightblue" => Color::LightBlue,
        "light_magenta" | "lightmagenta" => Color::LightMagenta,
        "light_cyan" | "lightcyan" => Color::LightCyan,
        "white" => Color::White,
        // Unknown name: fall back to white rather than failing.
        _ => Color::White,
    }
}
/// Runtime theme with fully parsed colors, derived from the `UiConfig`
/// strings via `parse_color` (see `impl From<UiConfig> for Theme`).
#[derive(Debug, Clone)]
pub struct Theme {
    pub border: Color,
    pub text: Color,
    pub self_name: Color,
    pub peer_name: Color,
    pub system_msg: Color,
    pub time: Color,
}
impl From<UiConfig> for Theme {
    /// Parse each configured color string into a concrete `Color`.
    /// Unrecognized values fall back to white (see `parse_color`),
    /// so building a theme never fails.
    fn from(cfg: UiConfig) -> Self {
        Self {
            border: parse_color(&cfg.border),
            text: parse_color(&cfg.text),
            self_name: parse_color(&cfg.self_name),
            peer_name: parse_color(&cfg.peer_name),
            system_msg: parse_color(&cfg.system_msg),
            time: parse_color(&cfg.time),
        }
    }
}

351
src/file_transfer/mod.rs Normal file
View File

@@ -0,0 +1,351 @@
//! File transfer module — chunked file transfers over dedicated QUIC streams.
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use sha2::{Sha256, Digest};
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use crate::protocol::{
decode_framed, write_framed, FileChunk, FileDone, FileId, FileOffer,
FileStreamMessage, FileAcceptReject, new_file_id,
};
/// Chunk size for file transfers (64 KB).
const CHUNK_SIZE: usize = 64 * 1024;
/// State of a file transfer, advanced by the send/receive drivers below.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum TransferState {
    /// We offered a file, waiting for accept/reject.
    Offering,
    /// Transfer is in progress.
    Transferring {
        /// Bytes sent or received so far.
        bytes_transferred: u64,
        /// Total file size in bytes.
        total_size: u64,
    },
    /// Transfer completed successfully.
    Complete,
    /// Transfer was rejected by the peer.
    Rejected,
    /// Transfer failed with an error.
    Failed(String),
}
/// Information about a tracked file transfer.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct TransferInfo {
    /// Unique transfer identifier shared with the peer.
    pub file_id: FileId,
    /// File name as shown in the UI (taken from the offer metadata).
    pub file_name: String,
    /// Total size in bytes, from the offer metadata.
    pub file_size: u64,
    /// Current progress state.
    pub state: TransferState,
    /// True when we are the sender.
    pub is_outgoing: bool,
}
use std::sync::{Arc, Mutex};
/// Manages file transfers.
///
/// Cheap to clone: the transfer map is shared behind an `Arc<Mutex<..>>`,
/// so spawned tasks and the UI observe the same state.
#[derive(Clone)]
pub struct FileTransferManager {
    /// All known transfers keyed by file ID.
    pub transfers: Arc<Mutex<HashMap<FileId, TransferInfo>>>,
    /// Directory where received files are written.
    #[allow(dead_code)]
    pub download_dir: PathBuf,
}
impl FileTransferManager {
    /// Create a manager that writes received files into `download_dir`.
    pub fn new(download_dir: PathBuf) -> Self {
        Self {
            transfers: Arc::new(Mutex::new(HashMap::new())),
            download_dir,
        }
    }

    /// Initiate sending a file to a peer.
    ///
    /// Hashes the whole file (SHA-256), registers an outgoing transfer in
    /// the `Offering` state, and returns the new file ID plus the
    /// `FileOffer` to broadcast. Does not transfer any data itself.
    pub async fn prepare_send(
        &self,
        file_path: &Path,
    ) -> Result<(FileId, FileOffer)> {
        let file_name = file_path
            .file_name()
            .context("No filename")?
            .to_string_lossy()
            .to_string();
        let metadata = tokio::fs::metadata(file_path)
            .await
            .context("Failed to read file metadata")?;
        let file_size = metadata.len();
        // Compute SHA-256 checksum so the receiver can verify integrity.
        let mut file = File::open(file_path).await?;
        let mut hasher = Sha256::new();
        let mut buf = vec![0u8; CHUNK_SIZE];
        loop {
            let n = file.read(&mut buf).await?;
            if n == 0 {
                break;
            }
            hasher.update(&buf[..n]);
        }
        let checksum: [u8; 32] = hasher.finalize().into();
        let file_id = new_file_id();
        let offer = FileOffer {
            file_id,
            name: file_name.clone(),
            size: file_size,
            checksum,
        };
        {
            let mut transfers = self.transfers.lock().unwrap();
            transfers.insert(
                file_id,
                TransferInfo {
                    file_id,
                    file_name,
                    file_size,
                    state: TransferState::Offering,
                    is_outgoing: true,
                },
            );
        }
        Ok((file_id, offer))
    }

    /// Execute the sending side of a file transfer over a QUIC bi-stream:
    /// offer -> wait for accept/reject -> stream chunks -> done marker.
    #[allow(dead_code)]
    pub async fn execute_send(
        &self,
        file_id: FileId,
        file_path: &Path,
        offer: FileOffer,
        send: &mut iroh::endpoint::SendStream,
        recv: &mut iroh::endpoint::RecvStream,
    ) -> Result<()> {
        // Send the offer and wait for the peer's decision.
        write_framed(send, &FileStreamMessage::Offer(offer)).await?;
        let response: FileStreamMessage = decode_framed(recv).await?;
        match response {
            FileStreamMessage::Accept(_) => {
                // Proceed with transfer.
            }
            FileStreamMessage::Reject(_) => {
                let mut transfers = self.transfers.lock().unwrap();
                if let Some(info) = transfers.get_mut(&file_id) {
                    info.state = TransferState::Rejected;
                }
                // A rejection is a normal outcome, not an error.
                return Ok(());
            }
            _ => {
                anyhow::bail!("Unexpected response to file offer");
            }
        }
        // Stream file chunks.
        let mut file = File::open(file_path).await?;
        let mut offset: u64 = 0;
        let total_size = tokio::fs::metadata(file_path).await?.len();
        let mut buf = vec![0u8; CHUNK_SIZE];
        loop {
            let n = file.read(&mut buf).await?;
            if n == 0 {
                break;
            }
            let chunk = FileStreamMessage::Chunk(FileChunk {
                file_id,
                offset,
                data: buf[..n].to_vec(),
            });
            write_framed(send, &chunk).await?;
            offset += n as u64;
            // Update progress. Keep the mutex scope tight so the lock is
            // never held across an await point.
            {
                let mut transfers = self.transfers.lock().unwrap();
                if let Some(info) = transfers.get_mut(&file_id) {
                    info.state = TransferState::Transferring {
                        bytes_transferred: offset,
                        total_size,
                    };
                }
            }
        }
        // Signal completion.
        write_framed(send, &FileStreamMessage::Done(FileDone { file_id })).await?;
        {
            let mut transfers = self.transfers.lock().unwrap();
            if let Some(info) = transfers.get_mut(&file_id) {
                info.state = TransferState::Complete;
            }
        }
        Ok(())
    }

    /// Register an incoming file offer so the UI can display it.
    #[allow(dead_code)]
    pub fn register_incoming_offer(&self, offer: &FileOffer) {
        let mut transfers = self.transfers.lock().unwrap();
        transfers.insert(
            offer.file_id,
            TransferInfo {
                file_id: offer.file_id,
                file_name: offer.name.clone(),
                file_size: offer.size,
                state: TransferState::Offering,
                is_outgoing: false,
            },
        );
    }

    /// Execute the receiving side of a file transfer over a QUIC bi-stream.
    ///
    /// Currently auto-accepts every offer and writes the file into
    /// `download_dir`, overwriting an existing file of the same name.
    #[allow(dead_code)]
    pub async fn execute_receive(
        &self,
        send: &mut iroh::endpoint::SendStream,
        recv: &mut iroh::endpoint::RecvStream,
    ) -> Result<FileId> {
        // Read the offer.
        let msg: FileStreamMessage = decode_framed(recv).await?;
        let offer = match msg {
            FileStreamMessage::Offer(o) => o,
            _ => anyhow::bail!("Expected file offer"),
        };
        let file_id = offer.file_id;
        self.register_incoming_offer(&offer);
        // Auto-accept for now.
        write_framed(
            send,
            &FileStreamMessage::Accept(FileAcceptReject { file_id }),
        )
        .await?;
        // The offered name comes from an untrusted peer: keep only its
        // final path component so a name like "../evil" or an absolute
        // path cannot escape the download directory.
        let safe_name = Path::new(&offer.name)
            .file_name()
            .map(|n| n.to_os_string())
            .unwrap_or_else(|| std::ffi::OsString::from("received.bin"));
        let dest_path = self.download_dir.join(&safe_name);
        // Ensure download dir exists for safety (though main normally does this).
        if let Some(parent) = dest_path.parent() {
            let _ = tokio::fs::create_dir_all(parent).await;
        }
        let mut file = tokio::fs::OpenOptions::new()
            .create(true)
            .write(true)
            .truncate(true)
            .open(&dest_path)
            .await?;
        let mut received: u64 = 0;
        loop {
            let chunk_msg: FileStreamMessage = decode_framed(recv).await?;
            match chunk_msg {
                FileStreamMessage::Chunk(chunk) => {
                    file.write_all(&chunk.data).await?;
                    received += chunk.data.len() as u64;
                    // Tight lock scope: released before the next await.
                    {
                        let mut transfers = self.transfers.lock().unwrap();
                        if let Some(info) = transfers.get_mut(&file_id) {
                            info.state = TransferState::Transferring {
                                bytes_transferred: received,
                                total_size: offer.size,
                            };
                        }
                    }
                }
                FileStreamMessage::Done(_) => {
                    break;
                }
                _ => {
                    anyhow::bail!("Unexpected message during file transfer");
                }
            }
        }
        file.flush().await?;
        {
            let mut transfers = self.transfers.lock().unwrap();
            if let Some(info) = transfers.get_mut(&file_id) {
                info.state = TransferState::Complete;
            }
        }
        Ok(file_id)
    }

    /// Get a snapshot of all tracked transfers for display.
    pub fn active_transfers(&self) -> Vec<TransferInfo> {
        let transfers = self.transfers.lock().unwrap();
        transfers.values().cloned().collect()
    }

    /// Format transfer progress as a human-readable string.
    pub fn format_progress(info: &TransferInfo) -> String {
        // NOTE(review): both direction markers are empty strings in the
        // original source — possibly arrow glyphs lost in transit.
        // Preserved byte-for-byte; confirm intended glyphs before changing.
        let direction = if info.is_outgoing { "" } else { "" };
        match &info.state {
            TransferState::Offering => {
                format!("{} {} (waiting...)", direction, info.file_name)
            }
            TransferState::Transferring {
                bytes_transferred,
                total_size,
            } => {
                let pct = if *total_size > 0 {
                    (*bytes_transferred as f64 / *total_size as f64 * 100.0) as u8
                } else {
                    0
                };
                format!(
                    "{} {} {}% ({}/{})",
                    direction,
                    info.file_name,
                    pct,
                    format_bytes(*bytes_transferred),
                    format_bytes(*total_size)
                )
            }
            TransferState::Complete => {
                format!("{} {} ✓ complete", direction, info.file_name)
            }
            TransferState::Rejected => {
                format!("{} {} ✗ rejected", direction, info.file_name)
            }
            TransferState::Failed(e) => {
                format!("{} {}{}", direction, info.file_name, e)
            }
        }
    }
}
/// Render a byte count with binary-prefix units (B, KiB, MiB, GiB),
/// one decimal place for everything above plain bytes.
fn format_bytes(bytes: u64) -> String {
    const KIB: f64 = 1024.0;
    const MIB: f64 = 1024.0 * 1024.0;
    const GIB: f64 = 1024.0 * 1024.0 * 1024.0;
    let b = bytes as f64;
    if b < KIB {
        format!("{}B", bytes)
    } else if b < MIB {
        format!("{:.1}KiB", b / KIB)
    } else if b < GIB {
        format!("{:.1}MiB", b / MIB)
    } else {
        format!("{:.1}GiB", b / GIB)
    }
}

10
src/lib.rs Normal file
View File

@@ -0,0 +1,10 @@
//! P2P Chat — library crate exposing modules for integration tests.
pub mod chat;
pub mod config;
pub mod file_transfer;
pub mod media;
pub mod net;
pub mod protocol;
pub mod tui;
pub mod web;

608
src/main.rs Normal file
View File

@@ -0,0 +1,608 @@
//! P2P Chat Application
//!
//! A Linux-only, terminal-based peer-to-peer communication app.
//! Chat is the primary feature, file transfer is first-class,
//! voice/camera/screen are optional, powered by PipeWire.
mod chat;
mod config;
mod file_transfer;
mod media;
mod net;
mod protocol;
mod tui;
mod web;
use std::io;
use std::path::PathBuf;
use anyhow::{Context, Result};
use clap::Parser;
use crossterm::event::{Event, EventStream};
use crossterm::terminal::{
disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen,
};
use crossterm::execute;
use iroh::EndpointId;
use n0_future::StreamExt;
use ratatui::backend::CrosstermBackend;
use ratatui::Terminal;
use tokio::sync::mpsc;
use crate::chat::ChatState;
use crate::config::AppConfig;
use crate::file_transfer::FileTransferManager;
use crate::media::MediaState;
use crate::net::{NetEvent, NetworkManager, PeerInfo};
use crate::protocol::{CapabilitiesMessage, GossipMessage, PeerAnnounce};
use crate::tui::{App, TuiCommand};
// NOTE: every `///` comment below is clap-derived help text — runtime
// behavior, not plain documentation. Do not edit them casually.
/// P2P Chat — decentralized chat over QUIC
#[derive(Parser, Debug)]
#[command(name = "p2p-chat", about = "Peer-to-peer chat over QUIC")]
struct Cli {
    /// Your display name
    #[arg(short, long, default_value = "anon")]
    name: String,
    /// Peer endpoint ID to join (hex string)
    #[arg(short, long)]
    join: Option<String>,
    /// Topic room ID (32-byte hex). Peers on the same topic can chat.
    #[arg(short, long)]
    topic: Option<String>, // Optional: falls back to config, then built-in default
    /// Download directory for received files
    #[arg(short, long, default_value = "~/Downloads")]
    download_dir: String,
    /// Screen resolution for sharing (e.g., 1280x720, 1920x1080)
    #[arg(long)]
    screen_resolution: Option<String>, // Optional: falls back to config, then 1280x720
}
#[tokio::main]
async fn main() -> Result<()> {
    // Initialize tracing to a file (not stdout, since the TUI owns the
    // terminal). If the log file cannot be opened, logs are discarded —
    // logging must never take the app down.
    let _tracing_guard = tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive("p2p_chat=debug".parse()?)
                .add_directive("iroh=warn".parse()?)
                .add_directive("iroh_gossip=warn".parse()?),
        )
        .with_writer(|| -> Box<dyn io::Write + Send> {
            match std::fs::OpenOptions::new()
                .create(true)
                .append(true)
                .open("p2p-chat.log")
            {
                Ok(f) => Box::new(f),
                Err(_) => Box::new(io::sink()),
            }
        })
        .with_ansi(false)
        .init();
    // Load config; a broken config file degrades to defaults with a warning.
    let config = AppConfig::load().unwrap_or_else(|e| {
        eprintln!("Warning: Failed to load config: {}", e);
        AppConfig::default()
    });
    let cli = Cli::parse();
    // Resolution precedence: CLI > config > built-in 1280x720.
    let res_str = cli.screen_resolution.as_deref().unwrap_or(&config.media.screen_resolution);
    let screen_res = parse_resolution(res_str).unwrap_or((1280, 720));
    // Topic precedence: CLI > config > default all-zero room string.
    let topic_str = cli.topic.as_deref().or(config.network.topic.as_deref()).unwrap_or("00000000000000000000000000000000");
    let topic_bytes = parse_topic(topic_str)?;
    // Initialize networking.
    let (mut net_mgr, _net_tx, mut net_rx) =
        NetworkManager::new(topic_bytes).await.context("Failed to start networking")?;
    let our_id = net_mgr.our_id;
    // Short (8-char) ID used for compact display.
    let our_id_short = format!("{}", our_id)
        .chars()
        .take(8)
        .collect::<String>();
    // Initialize application state.
    let mut chat = ChatState::new(cli.name.clone());
    // Resolve download directory, expanding a leading "~/" via $HOME.
    let download_path = if cli.download_dir.starts_with("~/") {
        if let Ok(home) = std::env::var("HOME") {
            PathBuf::from(home).join(&cli.download_dir[2..])
        } else {
            PathBuf::from(&cli.download_dir)
        }
    } else {
        PathBuf::from(&cli.download_dir)
    };
    // Ensure the download directory exists before any transfer starts.
    tokio::fs::create_dir_all(&download_path).await?;
    let file_mgr = FileTransferManager::new(download_path);
    // Media state seeded with device names/bitrate from config (if present).
    let mut media = MediaState::new(
        screen_res,
        config.media.mic_name.clone(),
        config.media.speaker_name.clone(),
        config.media.mic_bitrate,
    );
    // Build the TUI with the theme parsed from config colors.
    let theme = crate::config::Theme::from(config.ui.clone());
    let mut app = App::new(theme);
    let mut connected = false;
    // If a peer was specified on the CLI, use it as the gossip bootstrap node.
    let bootstrap_peers = if let Some(ref join_id) = cli.join {
        let peer_id: EndpointId = join_id.parse().context(
            "Invalid peer ID. Expected a hex-encoded endpoint ID.",
        )?;
        vec![peer_id]
    } else {
        vec![]
    };
    // Separate channel for gossip events; net_rx carries stream events.
    let (gossip_event_tx, mut gossip_event_rx) = mpsc::channel::<NetEvent>(256);
    // Join the gossip topic.
    net_mgr
        .join_gossip(bootstrap_peers, gossip_event_tx)
        .await
        .context("Failed to join gossip")?;
    // Announce ourselves to whoever is already on the topic.
    let announce = GossipMessage::PeerAnnounce(PeerAnnounce {
        sender_name: cli.name.clone(),
    });
    // Best-effort: don't fail if no peers are reachable yet.
    let _ = net_mgr.broadcast(&announce).await;
    // Insert ourselves into the shared peer list so the UI can render us.
    {
        let mut peers = net_mgr.peers.lock().await;
        peers.insert(our_id, PeerInfo {
            id: our_id,
            name: Some(cli.name.clone()),
            capabilities: None,
            is_self: true,
        });
    }
    // Broadcast our capabilities (best-effort, like the announce).
    let caps = GossipMessage::Capabilities(CapabilitiesMessage {
        sender_name: cli.name.clone(),
        ..Default::default()
    });
    let _ = net_mgr.broadcast(&caps).await;
    chat.add_system_message(format!("Welcome, {}! Your ID: {}", cli.name, our_id_short));
    chat.add_system_message(format!("Full Endpoint ID: {}", our_id));
    if cli.join.is_some() {
        chat.add_system_message("Connecting to peer...".to_string());
    } else {
        chat.add_system_message(
            "Waiting for peers. Share your Endpoint ID for others to join.".to_string(),
        );
    }
    // Start the web interface in the background.
    tokio::spawn(crate::web::start_web_server(
        media.broadcast_tx.clone(),
        media.mic_broadcast.clone(),
        media.cam_broadcast.clone(),
        media.screen_broadcast.clone(),
    ));
    // Set up the terminal for the TUI (raw mode + alternate screen).
    enable_raw_mode()?;
    let mut stdout = io::stdout();
    execute!(stdout, EnterAlternateScreen)?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;
    // Run the main event loop until quit/Ctrl+C.
    let mut our_name = cli.name.clone();
    let result = run_event_loop(
        &mut terminal,
        &mut app,
        &mut chat,
        file_mgr.clone(),
        &mut media,
        net_mgr.clone(),
        &mut net_rx,
        &mut gossip_event_rx,
        &mut our_name,
        &our_id_short,
        &mut connected,
    )
    .await;
    // Restore the terminal even when the event loop returned an error.
    disable_raw_mode()?;
    execute!(terminal.backend_mut(), LeaveAlternateScreen)?;
    terminal.show_cursor()?;
    // Shut down networking, then surface the event loop's result.
    let _ = net_mgr.shutdown().await;
    result
}
/// Drive the TUI: redraw once per iteration, then block on the next
/// terminal, network, gossip, or signal event. Returns when the user
/// quits, the terminal event stream ends, or Ctrl+C is received.
async fn run_event_loop(
    terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    chat: &mut ChatState,
    file_mgr: FileTransferManager,
    media: &mut MediaState,
    net: NetworkManager,
    net_rx: &mut mpsc::Receiver<NetEvent>,
    gossip_rx: &mut mpsc::Receiver<NetEvent>,
    our_name: &mut String,
    our_id_short: &str,
    connected: &mut bool,
) -> Result<()> {
    let mut event_stream = EventStream::new();
    loop {
        // Collect peers for rendering — self is always first.
        let peers: Vec<_> = {
            let p = net.peers.lock().await;
            let mut all: Vec<_> = p.values().cloned().collect();
            all.sort_by_key(|p| !p.is_self); // self first (false sorts before true)
            all
        };
        // Connected means at least one non-self peer is known.
        *connected = peers.iter().any(|p| !p.is_self);
        terminal.draw(|f| {
            tui::render(
                f,
                app,
                chat,
                &file_mgr,
                media,
                &peers,
                our_name,
                our_id_short,
                *connected,
            );
        })?;
        // Wait for the next event from any source.
        tokio::select! {
            // Terminal/keyboard events.
            maybe_event = event_stream.next() => {
                match maybe_event {
                    Some(Ok(Event::Key(key))) => {
                        let cmd = app.handle_key(key);
                        match cmd {
                            TuiCommand::SendMessage(text) => {
                                if let Err(e) = chat.send_message(text, &net).await {
                                    chat.add_system_message(format!("Send error: {}", e));
                                }
                            }
                            TuiCommand::SystemMessage(text) => {
                                chat.add_system_message(text);
                            }
                            TuiCommand::ToggleVoice => {
                                let status = media.toggle_voice(net.clone()).await;
                                chat.add_system_message(status.to_string());
                            }
                            TuiCommand::ToggleCamera => {
                                let status = media.toggle_camera(net.clone()).await;
                                chat.add_system_message(status.to_string());
                            }
                            TuiCommand::ToggleScreen => {
                                let status = media.toggle_screen(net.clone()).await;
                                chat.add_system_message(status.to_string());
                            }
                            TuiCommand::Quit => {
                                // Broadcast disconnect to peers (best-effort).
                                let disconnect_msg = GossipMessage::Disconnect {
                                    sender_name: our_name.clone(),
                                };
                                let _ = net.broadcast(&disconnect_msg).await;
                                // Brief delay to let the message propagate.
                                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
                                media.shutdown();
                                return Ok(());
                            }
                            TuiCommand::ChangeNick(new_nick) => {
                                let old = our_name.clone();
                                *our_name = new_nick.clone();
                                chat.our_name = new_nick.clone();
                                // Update self in the shared peer list.
                                {
                                    let mut peers = net.peers.lock().await;
                                    if let Some(self_peer) = peers.get_mut(&net.our_id) {
                                        self_peer.name = Some(new_nick.clone());
                                    }
                                }
                                chat.add_system_message(
                                    format!("Nickname changed: {}{}", old, new_nick),
                                );
                                // Broadcast the name change to all peers.
                                let msg = GossipMessage::NameChange(
                                    protocol::NameChange {
                                        old_name: old,
                                        new_name: new_nick,
                                    },
                                );
                                let _ = net.broadcast(&msg).await;
                            }
                            TuiCommand::Connect(peer_id_str) => {
                                match peer_id_str.parse::<crate::net::EndpointId>() {
                                    Ok(peer_id) => {
                                        chat.add_system_message(format!("Connecting to {}...", peer_id));
                                        if let Err(e) = net.connect(peer_id).await {
                                            chat.add_system_message(format!("Connection failed: {}", e));
                                        } else {
                                            chat.add_system_message("Connection initiated.".to_string());
                                        }
                                    }
                                    Err(_) => {
                                        chat.add_system_message(format!("Invalid peer ID: {}", peer_id_str));
                                    }
                                }
                            }
                            TuiCommand::SendFile(path) => {
                                chat.add_system_message(format!("Preparing to send file: {:?}", path));
                                if !path.exists() {
                                    chat.add_system_message(format!("File not found: {}", path.display()));
                                } else {
                                    let file_mgr = file_mgr.clone();
                                    // Prepare send (hashing) — runs inline for now;
                                    // ideally offloaded to a background task.
                                    match file_mgr.prepare_send(&path).await {
                                        Ok((file_id, offer)) => {
                                            chat.add_system_message(format!("Offering file: {}", offer.name));
                                            let broadcast = protocol::FileOfferBroadcast {
                                                sender_name: our_name.to_string(),
                                                file_id,
                                                file_name: offer.name.clone(),
                                                file_size: offer.size,
                                            };
                                            let msg = GossipMessage::FileOfferBroadcast(broadcast);
                                            if let Err(e) = net.broadcast(&msg).await {
                                                chat.add_system_message(format!("Failed to broadcast offer: {}", e));
                                            }
                                        }
                                        Err(e) => {
                                            chat.add_system_message(format!("Failed to prepare file: {}", e));
                                        }
                                    }
                                }
                            }
                            TuiCommand::Leave => {
                                chat.add_system_message("Leaving group chat...".to_string());
                                media.shutdown();
                                // Clear peer list (except self).
                                {
                                    let mut peers = net.peers.lock().await;
                                    peers.retain(|_, info| info.is_self);
                                }
                                chat.add_system_message("Session ended. Use /connect <peer_id> to start a new session.".to_string());
                                // Deliberately does NOT exit the application.
                            }
                            TuiCommand::SelectMic(node_name) => {
                                media.set_mic_name(Some(node_name.clone()));
                                // ...
                            }
                            TuiCommand::SetBitrate(bps) => {
                                media.set_bitrate(bps);
                                chat.add_system_message(format!("🎵 Bitrate set to {} kbps", bps / 1000));
                                // Persist the new bitrate to config (best-effort).
                                if let Ok(mut cfg) = crate::config::AppConfig::load() {
                                    cfg.media.mic_bitrate = bps;
                                    let _ = cfg.save();
                                }
                            }
                            TuiCommand::SelectSpeaker(node_name) => {
                                media.set_speaker_name(Some(node_name.clone()));
                                chat.add_system_message(format!("🔊 Speaker set to: {}", node_name));
                                // Persist the speaker choice to config (best-effort).
                                if let Ok(mut cfg) = crate::config::AppConfig::load() {
                                    cfg.media.speaker_name = Some(node_name);
                                    if let Err(e) = cfg.save() {
                                        tracing::warn!("Failed to save config: {}", e);
                                    }
                                }
                                if media.voice_enabled() {
                                    chat.add_system_message("Restart voice chat to apply changes.".to_string());
                                }
                            }
                            TuiCommand::None => {}
                        }
                    }
                    Some(Ok(Event::Resize(_, _))) => {
                        // Terminal resize — just redraw on the next iteration.
                    }
                    Some(Err(e)) => {
                        tracing::error!("Terminal event error: {}", e);
                    }
                    None => {
                        // Terminal event stream closed: exit the loop.
                        return Ok(());
                    }
                    _ => {}
                }
            }
            // Network events from the file transfer acceptor.
            Some(event) = net_rx.recv() => {
                handle_net_event(event, chat, file_mgr.clone(), media, &net).await;
            }
            // Gossip events.
            Some(event) = gossip_rx.recv() => {
                handle_net_event(event, chat, file_mgr.clone(), media, &net).await;
            }
            // Signal handling (Ctrl+C / SIGTERM): mirror the Quit path.
            _ = tokio::signal::ctrl_c() => {
                // Broadcast disconnect to peers (best-effort).
                let disconnect_msg = GossipMessage::Disconnect {
                    sender_name: our_name.clone(),
                };
                let _ = net.broadcast(&disconnect_msg).await;
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
                media.shutdown();
                return Ok(());
            }
        }
    }
}
/// Apply a single network event to the chat/peer/media state.
///
/// Gossip messages update chat history and the shared peer list; incoming
/// stream events hand the QUIC streams off to the file-transfer or media layer.
async fn handle_net_event(
    event: NetEvent,
    chat: &mut ChatState,
    file_mgr: FileTransferManager,
    media: &mut MediaState,
    net: &NetworkManager,
) {
    match event {
        NetEvent::GossipReceived { from, message } => match message {
            GossipMessage::Chat(msg) => {
                chat.receive_message(msg);
            }
            GossipMessage::Capabilities(caps) => {
                // Capabilities double as a name announcement.
                let mut peers = net.peers.lock().await;
                if let Some(peer) = peers.get_mut(&from) {
                    peer.name = Some(caps.sender_name.clone());
                    peer.capabilities = Some(caps);
                }
            }
            GossipMessage::PeerAnnounce(announce) => {
                let mut peers = net.peers.lock().await;
                if let Some(peer) = peers.get_mut(&from) {
                    peer.name = Some(announce.sender_name.clone());
                }
                let short_id: String = format!("{}", from).chars().take(8).collect();
                chat.add_system_message(format!(
                    "{} ({}) joined",
                    announce.sender_name, short_id
                ));
            }
            GossipMessage::FileOfferBroadcast(offer) => {
                // Informational only; the actual transfer arrives on its own stream.
                chat.add_system_message(format!(
                    "{} offers file: {} ({} bytes)",
                    offer.sender_name, offer.file_name, offer.file_size
                ));
            }
            GossipMessage::NameChange(change) => {
                // Update the peer's name in the shared peer list.
                let mut peers = net.peers.lock().await;
                if let Some(peer) = peers.get_mut(&from) {
                    peer.name = Some(change.new_name.clone());
                }
                chat.add_system_message(format!(
                    "✏️ {} is now known as {}",
                    change.old_name, change.new_name
                ));
            }
            GossipMessage::Disconnect { sender_name } => {
                // Remove the peer from the list entirely.
                {
                    let mut peers = net.peers.lock().await;
                    peers.remove(&from);
                }
                let short_id: String = format!("{}", from).chars().take(8).collect();
                chat.add_system_message(format!(
                    "👋 {} ({}) disconnected",
                    sender_name, short_id
                ));
            }
        },
        NetEvent::PeerUp(peer_id) => {
            let short_id: String = format!("{}", peer_id).chars().take(8).collect();
            chat.add_system_message(format!("Peer connected: {}", short_id));
        }
        NetEvent::PeerDown(peer_id) => {
            let short_id: String = format!("{}", peer_id).chars().take(8).collect();
            chat.add_system_message(format!("Peer disconnected: {}", short_id));
        }
        NetEvent::IncomingFileStream {
            from,
            mut send,
            mut recv,
        } => {
            chat.add_system_message("Incoming file transfer...".to_string());
            tracing::info!("Incoming file stream from {:?}", from);
            // Spawn a task so the event loop stays responsive during transfer.
            tokio::spawn(async move {
                if let Err(e) = file_mgr.execute_receive(&mut send, &mut recv).await {
                    tracing::error!("File receive failed: {}", e);
                }
            });
        }
        NetEvent::IncomingMediaStream {
            from,
            kind,
            send,
            recv,
        } => {
            let short_id: String = format!("{}", from).chars().take(8).collect();
            chat.add_system_message(format!("📡 Incoming {:?} stream from {}", kind, short_id));
            media.handle_incoming_media(from, kind, send, recv);
        }
    }
}
/// Parse a topic string into a 32-byte gossip topic ID.
///
/// A 64-character string is interpreted as hex-encoded 32 bytes; any
/// other input (including the 32-character default) is treated as a
/// human-readable room name and hashed with SHA-256 to derive a
/// deterministic topic. Previously the hashing branch was duplicated
/// for the "len == 32" and "other length" cases; they are merged here.
fn parse_topic(hex_str: &str) -> Result<[u8; 32]> {
    let s = hex_str.trim();
    if s.len() == 64 {
        // Hex-encoded 32-byte topic.
        let mut bytes = [0u8; 32];
        for i in 0..32 {
            bytes[i] = u8::from_str_radix(&s[i * 2..i * 2 + 2], 16)
                .context("Invalid hex in topic")?;
        }
        return Ok(bytes);
    }
    // Room name: hash it so peers using the same name land on the same topic.
    use sha2::{Digest, Sha256};
    let mut hasher = Sha256::new();
    hasher.update(s.as_bytes());
    let result = hasher.finalize();
    let mut bytes = [0u8; 32];
    bytes.copy_from_slice(&result);
    Ok(bytes)
}
/// Parse a "WIDTHxHEIGHT" string (e.g. "1280x720") into a `(w, h)` pair.
/// Returns `None` for anything that is not exactly two unsigned integers
/// separated by an 'x'.
fn parse_resolution(res: &str) -> Option<(u32, u32)> {
    let (w_str, h_str) = res.split_once('x')?;
    let width: u32 = w_str.parse().ok()?;
    let height: u32 = h_str.parse().ok()?;
    Some((width, height))
}

389
src/media/capture.rs Normal file
View File

@@ -0,0 +1,389 @@
//! Video capture and playback using FFmpeg and MPV.
//!
//! Captures video by spawning an `ffmpeg` process and reading its stdout.
//! Plays video by spawning `mpv` (or `vlc`) and writing to its stdin.
use std::io::Read;
use std::process::{Command as StdCommand, Stdio as StdStdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use anyhow::{Context, Result};
use tracing;
use crate::protocol::{write_framed, decode_framed, MediaKind, MediaStreamMessage};
use crate::media::WebMediaEvent;
/// Manages a video capture session (camera or screen).
pub struct VideoCapture {
    /// Which media stream this session produces (camera or screen).
    pub kind: MediaKind,
    /// Shared stop flag checked by the capture thread and sender tasks.
    running: Arc<AtomicBool>,
    /// FFmpeg reader thread, present only for native capture sessions.
    capture_thread: Option<thread::JoinHandle<()>>,
    /// Player thread for incoming video, when one is spawned.
    player_thread: Option<thread::JoinHandle<()>>,
    /// Per-peer async sender tasks, aborted on `stop`.
    tasks: Vec<tokio::task::JoinHandle<()>>,
}
/// Chunk of video data from FFmpeg.
struct VideoChunk {
    /// Timestamp in milliseconds attached by the capture side.
    timestamp_ms: u64,
    /// Encoded video bytes as read from the capture process.
    data: Vec<u8>,
}
impl VideoCapture {
/// Start video capture with web input (broadcast receiver).
///
/// Spawns one async sender task per peer; each task subscribes to the
/// broadcast channel and forwards frames until `running` is cleared.
/// No native ffmpeg capture thread is started in this mode.
pub async fn start_web(
    kind: MediaKind,
    _local_peer_id: iroh::EndpointId,
    peers: Vec<iroh::EndpointId>,
    network_manager: crate::net::NetworkManager,
    // NOTE(review): despite the `_rx` name this is a broadcast *Sender*;
    // each per-peer task calls `.subscribe()` on it to obtain its receiver.
    input_rx: tokio::sync::broadcast::Sender<Vec<u8>>,
) -> Result<Self> {
    let running = Arc::new(AtomicBool::new(true));
    // Spawn one sender task per peer.
    let mut tasks = Vec::new();
    for peer in peers {
        let running = running.clone();
        let net = network_manager.clone();
        let rx = input_rx.subscribe();
        let kind = kind.clone();
        let task = tokio::spawn(async move {
            if let Err(e) = run_video_sender_web(net, peer, kind, rx, running).await {
                tracing::error!("Video sender web error: {}", e);
            }
        });
        tasks.push(task);
    }
    Ok(Self {
        kind,
        running,
        // Web mode uses no local capture or player threads.
        capture_thread: None,
        player_thread: None,
        tasks,
    })
}
/// Start capture of the given `kind` and send video data to peers.
///
/// Spawns a dedicated OS thread that runs ffmpeg and pushes encoded
/// chunks into a bounded channel, plus one async task per peer stream
/// that frames the chunks onto the wire (VideoStart / VideoFrame / VideoStop).
pub fn start(
    kind: MediaKind,
    peer_streams: Vec<(iroh::endpoint::SendStream, iroh::endpoint::RecvStream)>,
    resolution: Option<(u32, u32)>,
) -> Result<Self> {
    let running = Arc::new(AtomicBool::new(true));
    // Use a bounded channel to backpressure ffmpeg if the network is slow.
    let (chunk_tx, chunk_rx) = crossbeam_channel::bounded::<VideoChunk>(64);
    let capture_running = running.clone();
    let capture_thread = thread::Builder::new()
        .name(format!("ffmpeg-{:?}", kind))
        .spawn(move || {
            if let Err(e) = run_ffmpeg_capture(capture_running, chunk_tx, kind, resolution) {
                tracing::error!("FFmpeg capture error: {}", e);
            }
        })
        .context("Failed to spawn ffmpeg capture thread")?;
    let mut tasks = Vec::new();
    // Parameters for signaling (mostly informative, as ffmpeg controls the stream).
    let (width, height, fps) = match kind {
        MediaKind::Camera => (640, 480, 30),
        MediaKind::Screen => {
            let (w, h) = resolution.unwrap_or((1920, 1080));
            (w, h, 30)
        },
        _ => (0, 0, 0),
    };
    for (mut send, _recv) in peer_streams {
        let chunk_rx = chunk_rx.clone();
        let running = running.clone();
        let task = tokio::spawn(async move {
            let start = MediaStreamMessage::VideoStart {
                kind,
                width,
                height,
                fps,
            };
            if let Err(e) = write_framed(&mut send, &start).await {
                tracing::error!("Failed to send VideoStart: {}", e);
                return;
            }
            let mut sequence: u64 = 0;
            loop {
                if !running.load(Ordering::Relaxed) {
                    break;
                }
                // Poll with a timeout so the stop flag is re-checked even
                // when no frames arrive for a while.
                match chunk_rx.recv_timeout(std::time::Duration::from_millis(500)) {
                    Ok(chunk) => {
                        let msg = MediaStreamMessage::VideoFrame {
                            sequence,
                            timestamp_ms: chunk.timestamp_ms,
                            data: chunk.data,
                        };
                        if let Err(e) = write_framed(&mut send, &msg).await {
                            tracing::debug!("Failed to send VideoFrame (peer disconnected?): {}", e);
                            break;
                        }
                        sequence += 1;
                    }
                    Err(crossbeam_channel::RecvTimeoutError::Timeout) => continue,
                    Err(crossbeam_channel::RecvTimeoutError::Disconnected) => break,
                }
            }
            // Tell the peer the stream is over (best-effort).
            let stop = MediaStreamMessage::VideoStop { kind };
            let _ = write_framed(&mut send, &stop).await;
        });
        tasks.push(task);
    }
    Ok(Self {
        kind,
        running,
        capture_thread: Some(capture_thread),
        player_thread: None, // No playback in a capture session.
        tasks,
    })
}
/// Stop capture.
pub fn stop(&mut self) {
self.running.store(false, Ordering::Relaxed);
for task in self.tasks.drain(..) {
task.abort();
}
if let Some(handle) = self.capture_thread.take() {
let _ = handle.join();
}
if let Some(handle) = self.player_thread.take() {
let _ = handle.join();
}
}
/// Handle incoming video stream taking the first start message (Web Version).
pub async fn handle_incoming_video_web(
from: iroh::EndpointId,
message: MediaStreamMessage,
mut recv: iroh::endpoint::RecvStream,
broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
) -> Result<()> {
let kind = match message {
MediaStreamMessage::VideoStart { kind, .. } => kind,
_ => anyhow::bail!("Invalid start message for video"),
};
tracing::info!("Starting {:?} playback via Web Broadcast", kind);
loop {
match decode_framed::<MediaStreamMessage>(&mut recv).await {
Ok(msg) => match msg {
MediaStreamMessage::VideoFrame { data, .. } => {
// Send to web
let short_id: String = format!("{}", from).chars().take(8).collect();
let _ = broadcast_tx.send(WebMediaEvent::Video { peer_id: short_id, kind, data });
}
MediaStreamMessage::VideoStop { .. } => {
break;
}
_ => {}
},
Err(_) => break, // Stream closed
}
}
Ok(())
}
/// Handle incoming video stream taking the first start message.
pub async fn handle_incoming_video(
message: MediaStreamMessage,
recv: iroh::endpoint::RecvStream,
) -> Result<()> {
match message {
MediaStreamMessage::VideoStart { kind, .. } => {
tracing::info!("Starting {:?} playback via MPV/VLC", kind);
run_player_loop(recv).await
}
_ => anyhow::bail!("Invalid start message for video"),
}
}
}
impl Drop for VideoCapture {
    /// Ensure tasks and child processes are torn down even if `stop()`
    /// was never called explicitly.
    fn drop(&mut self) {
        self.stop();
    }
}
// ---------------------------------------------------------------------------
// FFmpeg Capture Logic
// ---------------------------------------------------------------------------
async fn run_video_sender_web(
network_manager: crate::net::NetworkManager,
peer: iroh::EndpointId,
kind: MediaKind,
mut input_rx: tokio::sync::broadcast::Receiver<Vec<u8>>,
running: Arc<AtomicBool>,
) -> Result<()> {
let (mut send, _) = network_manager.open_media_stream(peer, kind.clone()).await?;
// For web, we assume fixed resolution and fps for now.
write_framed(&mut send, &MediaStreamMessage::VideoStart { kind, width: 640, height: 480, fps: 30 }).await?;
while running.load(Ordering::Relaxed) {
match input_rx.recv().await {
Ok(data) => {
// Web sends MJPEG chunk (full frame)
let msg = MediaStreamMessage::VideoFrame {
sequence: 0, // Sequence not used for web input, set to 0
timestamp_ms: std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_millis() as u64,
data,
};
if write_framed(&mut send, &msg).await.is_err() {
break;
}
}
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
tracing::warn!("Video sender lagged");
}
}
}
let _ = write_framed(&mut send, &MediaStreamMessage::VideoStop { kind }).await;
send.finish()?;
Ok(())
}
/// Run a blocking capture loop on a dedicated thread.
///
/// Spawns ffmpeg (camera via v4l2) or a gst-launch PipeWire pipeline
/// (screen), reads its stdout in 4 KiB chunks, timestamps each chunk,
/// and pushes it into `chunk_tx`. Exits when `running` is cleared, the
/// process hits EOF, the channel receiver is dropped, or a read fails;
/// the child is killed and reaped on every exit path.
fn run_ffmpeg_capture(
    running: Arc<AtomicBool>,
    chunk_tx: crossbeam_channel::Sender<VideoChunk>,
    kind: MediaKind,
    resolution: Option<(u32, u32)>,
) -> Result<()> {
    let mut child = match kind {
        MediaKind::Camera => {
            // Camera: Use ffmpeg directly with v4l2.
            // NOTE(review): `-preset`/`-tune` are x264 options; with `-c:v mjpeg`
            // ffmpeg may reject or ignore them — verify on a target system.
            let mut cmd = StdCommand::new("ffmpeg");
            cmd.args(&[
                "-f", "v4l2", "-framerate", "30", "-video_size", "640x480", "-i", "/dev/video0",
                "-c:v", "mjpeg", "-preset", "ultrafast", "-tune", "zerolatency", "-f", "mpegts",
                "-",
            ]);
            cmd.stdout(StdStdio::piped()).stderr(StdStdio::null());
            cmd.spawn().context("Failed to spawn ffmpeg for camera")?
        }
        MediaKind::Screen => {
            // Screen: Use pipewiresrc (via gst-launch) with MJPEG encoding.
            // Resolution is configurable; defaults to 1920x1080 when unset.
            let (w, h) = resolution.unwrap_or((1920, 1080));
            // Pipeline: pipewiresrc -> videoscale -> jpegenc -> fdsink
            let pipeline = format!(
                "gst-launch-1.0 -q pipewiresrc do-timestamp=true \
                 ! videoscale ! video/x-raw,width={},height={} \
                 ! jpegenc quality=50 \
                 ! fdsink",
                w, h
            );
            let mut cmd = StdCommand::new("sh");
            cmd.args(&["-c", &pipeline]);
            cmd.stdout(StdStdio::piped()).stderr(StdStdio::null());
            cmd.spawn().context("Failed to spawn pipewire capture pipeline")?
        }
        _ => anyhow::bail!("Unsupported media kind for capture"),
    };
    let mut stdout = child.stdout.take().context("Failed to open capture stdout")?;
    let mut buffer = [0u8; 4096]; // 4KB chunks
    while running.load(Ordering::Relaxed) {
        match stdout.read(&mut buffer) {
            Ok(0) => break, // EOF
            Ok(n) => {
                let chunk = VideoChunk {
                    // Wall-clock timestamp of the read, in ms since the epoch.
                    timestamp_ms: std::time::SystemTime::now()
                        .duration_since(std::time::UNIX_EPOCH)
                        .unwrap_or_default()
                        .as_millis() as u64,
                    data: buffer[..n].to_vec(),
                };
                // Blocking send provides backpressure (bounded channel);
                // an Err means all senders' receivers were dropped.
                if chunk_tx.send(chunk).is_err() {
                    break; // Receiver dropped
                }
            }
            Err(e) => {
                tracing::error!("Error reading capture stdout: {}", e);
                break;
            }
        }
    }
    // Terminate and reap the child regardless of why we exited.
    let _ = child.kill();
    let _ = child.wait();
    Ok(())
}
// ---------------------------------------------------------------------------
// Player Logic (MPV/VLC)
// ---------------------------------------------------------------------------
/// Pipe incoming video frames into a local player process (MPV, falling
/// back to VLC) until the sender stops or the stream/player closes.
///
/// The player reads the MJPEG/MPEG-TS byte stream from stdin; the child
/// is configured with `kill_on_drop` so it is terminated when this
/// function returns.
async fn run_player_loop(mut recv: iroh::endpoint::RecvStream) -> Result<()> {
    use tokio::process::Command;
    use tokio::io::AsyncWriteExt;
    use std::process::Stdio;
    // Try spawning mpv with low-latency settings first.
    let mut cmd = Command::new("mpv");
    cmd.args(&["--no-cache", "--untimed", "--no-terminal", "--profile=low-latency", "-"]);
    cmd.stdin(Stdio::piped())
        .stdout(Stdio::null())
        .stderr(Stdio::null());
    // We must kill child on drop.
    cmd.kill_on_drop(true);
    let mut child = match cmd.spawn() {
        Ok(c) => c,
        Err(e) => {
            // mpv missing (or failed to start): fall back to VLC.
            tracing::warn!("Failed to spawn mpv ({}), trying vlc...", e);
            let mut vlc = Command::new("vlc");
            vlc.args(&["-", "--network-caching=300"]);
            vlc.stdin(Stdio::piped())
                .stdout(Stdio::null())
                .stderr(Stdio::null());
            vlc.kill_on_drop(true);
            vlc.spawn().context("Failed to spawn vlc either")?
        }
    };
    let mut stdin = child.stdin.take().context("Failed to open player stdin")?;
    loop {
        match decode_framed::<MediaStreamMessage>(&mut recv).await {
            Ok(msg) => match msg {
                MediaStreamMessage::VideoFrame { data, .. } => {
                    // A write error means the user closed the player window.
                    if let Err(_) = stdin.write_all(&data).await {
                        break; // Player closed
                    }
                }
                MediaStreamMessage::VideoStop { .. } => {
                    break;
                }
                _ => {}
            },
            Err(_) => break, // Stream closed
        }
    }
    // Child is killed on drop (recv loop end)
    Ok(())
}

327
src/media/mod.rs Normal file
View File

@@ -0,0 +1,327 @@
//! Media module — voice chat, camera, and screen sharing.
//!
//! Sub-modules:
//! - `voice`: mic capture + Opus codec + PipeWire playback
//! - `capture`: FFmpeg capture + MPV/VLC playback
//!
//! Each feature is runtime-toggleable and runs on dedicated threads/tasks.
pub mod capture;
pub mod voice;
use iroh::EndpointId;
use tracing;
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering};
use crate::net::NetworkManager;
use crate::protocol::{MediaKind, MediaStreamMessage, decode_framed};
use self::capture::VideoCapture;
use self::voice::VoiceChat;
/// Event sent to the web interface for playback.
#[derive(Debug, Clone)]
pub enum WebMediaEvent {
    /// Decoded PCM samples (f32, mono) from `peer_id` (truncated hex id).
    Audio { peer_id: String, data: Vec<f32> },
    /// An encoded video frame/chunk of the given kind from `peer_id`.
    Video { peer_id: String, kind: MediaKind, data: Vec<u8> },
}
/// Tracks all active media sessions.
pub struct MediaState {
    /// Active voice chat session (if any).
    voice: Option<VoiceChat>,
    /// Active camera capture (if any).
    camera: Option<VideoCapture>,
    /// Active screen capture (if any).
    screen: Option<VideoCapture>,
    /// Playback task handles for incoming streams (voice/video).
    /// Using a list to allow multiple streams (audio+video) from same or different peers.
    incoming_media: Vec<tokio::task::JoinHandle<()>>,
    /// Whether PipeWire is available on this system (checked once at startup).
    pipewire_available: bool,
    /// Configured screen resolution (width, height).
    #[allow(dead_code)]
    screen_resolution: (u32, u32),
    /// Configured microphone name (target).
    mic_name: Option<String>,
    /// Configured speaker name (target).
    speaker_name: Option<String>,
    /// Broadcast channel for web playback (peers -> browser).
    pub broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
    // Input channels (from Web -> MediaState -> Peers)
    /// Raw PCM samples captured by the browser microphone.
    pub mic_broadcast: tokio::sync::broadcast::Sender<Vec<f32>>,
    /// Encoded camera frames captured by the browser.
    pub cam_broadcast: tokio::sync::broadcast::Sender<Vec<u8>>,
    /// Encoded screen-share frames captured by the browser.
    pub screen_broadcast: tokio::sync::broadcast::Sender<Vec<u8>>,
    /// Target Opus bitrate (bps), updatable at runtime from the UI.
    pub mic_bitrate: Arc<AtomicU32>,
}
impl MediaState {
    /// Create a new media state with the given configuration.
    ///
    /// Allocates all web <-> peer broadcast channels up front (capacity 100
    /// each) and probes once for PipeWire availability.
    pub fn new(screen_resolution: (u32, u32), mic_name: Option<String>, speaker_name: Option<String>, mic_bitrate: u32) -> Self {
        let pipewire_available = check_pipewire();
        let (broadcast_tx, _) = tokio::sync::broadcast::channel(100);
        let (mic_broadcast, _) = tokio::sync::broadcast::channel(100);
        let (cam_broadcast, _) = tokio::sync::broadcast::channel(100);
        let (screen_broadcast, _) = tokio::sync::broadcast::channel(100);
        Self {
            voice: None,
            camera: None,
            screen: None,
            incoming_media: Vec::new(),
            pipewire_available,
            screen_resolution,
            mic_name,
            speaker_name,
            broadcast_tx,
            mic_broadcast,
            cam_broadcast,
            screen_broadcast,
            mic_bitrate: Arc::new(AtomicU32::new(mic_bitrate)),
        }
    }
    /// Set the target Opus bitrate (bps); picked up by the active voice
    /// sender on its next loop iteration.
    pub fn set_bitrate(&self, bitrate: u32) {
        self.mic_bitrate.store(bitrate, Ordering::Relaxed);
    }
    /// Update the selected microphone name.
    pub fn set_mic_name(&mut self, name: Option<String>) {
        self.mic_name = name;
    }
    /// Update the selected speaker name.
    pub fn set_speaker_name(&mut self, name: Option<String>) {
        self.speaker_name = name;
    }
    // -----------------------------------------------------------------------
    // Public state queries
    // -----------------------------------------------------------------------
    pub fn voice_enabled(&self) -> bool {
        self.voice.is_some()
    }
    pub fn camera_enabled(&self) -> bool {
        self.camera.is_some()
    }
    pub fn screen_enabled(&self) -> bool {
        self.screen.is_some()
    }
    #[allow(dead_code)]
    pub fn pipewire_available(&self) -> bool {
        self.pipewire_available
    }
    // -----------------------------------------------------------------------
    // Toggle methods — return a status message for the TUI
    // -----------------------------------------------------------------------
    /// Toggle voice chat. Opens media QUIC streams to all current peers.
    ///
    /// Note: peers joining AFTER the toggle are not added to the session;
    /// the fan-out set is fixed at start time.
    pub async fn toggle_voice(&mut self, net: NetworkManager) -> &'static str {
        if !self.pipewire_available {
            return "Voice chat unavailable (PipeWire not found)";
        }
        if self.voice.is_some() {
            // Stop
            if let Some(mut v) = self.voice.take() {
                v.stop();
            }
            "🎤 Voice chat stopped"
        } else {
            // Start — open media streams to all peers
            // For web capture, we don't open streams here. start_web does it.
            let peers = net.peers.lock().await;
            match VoiceChat::start_web(
                net.clone(),
                peers.keys().cloned().collect(),
                self.mic_broadcast.subscribe(), // Subscribe to get new receiver!
                self.broadcast_tx.clone(),
                self.mic_bitrate.clone(),
            ) {
                Ok(vc) => {
                    self.voice = Some(vc);
                    "🎤 Voice chat started (Web)"
                }
                Err(e) => {
                    tracing::error!("Failed to start voice chat: {}", e);
                    "🎤 Failed to start voice chat"
                }
            }
        }
    }
    /// Toggle camera capture.
    pub async fn toggle_camera(&mut self, net: NetworkManager) -> &'static str {
        // We use ffmpeg now, which doesn't strictly depend on pipewire crate,
        // but likely requires pipewire daemon or v4l2.
        // We kept pipewire check for consistency but it might be loose.
        if self.camera.is_some() {
            if let Some(mut c) = self.camera.take() {
                c.stop();
            }
            "📷 Camera stopped"
        } else {
            // Start
            let peers = net.peers.lock().await;
            match VideoCapture::start_web(
                MediaKind::Camera,
                net.our_id,
                peers.keys().cloned().collect(),
                net.clone(),
                self.cam_broadcast.clone(),
            ).await {
                Ok(vc) => {
                    self.camera = Some(vc);
                    "📷 Camera started (Web)"
                }
                Err(e) => {
                    tracing::error!("Failed to start camera: {}", e);
                    "📷 Failed to start camera"
                }
            }
        }
    }
    /// Toggle screen sharing.
    pub async fn toggle_screen(&mut self, net: NetworkManager) -> &'static str {
        if self.screen.is_some() {
            if let Some(mut s) = self.screen.take() {
                s.stop();
            }
            "🖥 Screen sharing stopped"
        } else {
            // Start
            let peers = net.peers.lock().await;
            match VideoCapture::start_web(
                MediaKind::Screen,
                net.our_id,
                peers.keys().cloned().collect(),
                net.clone(),
                self.screen_broadcast.clone(),
            ).await {
                Ok(vc) => {
                    self.screen = Some(vc);
                    "🖥️ Screen share started (Web)"
                }
                Err(e) => {
                    tracing::error!("Failed to start screen share: {}", e);
                    "🖥️ Failed to start screen share"
                }
            }
        }
    }
    // -----------------------------------------------------------------------
    // Incoming media handling
    // -----------------------------------------------------------------------
    /// Handle an incoming media stream from a peer.
    ///
    /// Spawns a task that decodes the first framed message to determine
    /// the stream type (audio vs video) and then dispatches to the
    /// corresponding web playback handler. The `_send` half is unused
    /// because incoming media is consume-only.
    pub fn handle_incoming_media(
        &mut self,
        from: EndpointId,
        kind: MediaKind,
        _send: iroh::endpoint::SendStream,
        mut recv: iroh::endpoint::RecvStream,
    ) {
        let speaker_name = self.speaker_name.clone();
        let broadcast_tx = self.broadcast_tx.clone();
        // Spawn a task to determine stream type and handle it
        let handle = tokio::spawn(async move {
            // Read first message to determine type.
            // Note: We already know the kind from ALPN, but we still decode the start message.
            match decode_framed::<MediaStreamMessage>(&mut recv).await {
                Ok(msg) => match msg {
                    MediaStreamMessage::AudioStart { .. } => {
                        if kind != MediaKind::Voice {
                            tracing::warn!("ALPN mismatch: expected Voice, got AudioStart");
                        }
                        tracing::info!("Accepted Audio stream from {:?}", from);
                        if let Err(e) = VoiceChat::handle_incoming_audio_web(from, msg, recv, broadcast_tx).await {
                            tracing::error!("Audio web playback error: {}", e);
                        }
                    }
                    MediaStreamMessage::VideoStart { .. } => {
                        tracing::info!("Accepted Video stream from {:?}", from);
                        if let Err(e) = VideoCapture::handle_incoming_video_web(from, msg, recv, broadcast_tx).await {
                            tracing::error!("Video web playback error: {}", e);
                        }
                    }
                    _ => {
                        tracing::warn!("Unknown or unexpected start message from {:?}: {:?}", from, msg);
                    }
                },
                Err(e) => {
                    tracing::warn!("Failed to decode initial media message from {:?}: {}", from, e);
                }
            }
        });
        // Store handle to allow cleanup on shutdown
        // We clean up finished tasks periodically or on shutdown
        self.incoming_media.push(handle);
        // Clean up finished tasks
        self.incoming_media.retain(|h| !h.is_finished());
    }
    // -----------------------------------------------------------------------
    // Status for TUI
    // -----------------------------------------------------------------------
    /// Get a status line for the TUI status bar.
    pub fn status_line(&self) -> String {
        let mic = if self.voice_enabled() {
            "🎤 LIVE"
        } else {
            "🎤 off"
        };
        let cam = if self.camera_enabled() {
            "📷 LIVE"
        } else {
            "📷 off"
        };
        let scr = if self.screen_enabled() {
            "🖥 LIVE"
        } else {
            "🖥 off"
        };
        format!("{}{}{}", mic, cam, scr)
    }
    /// Shut down all active media.
    pub fn shutdown(&mut self) {
        if let Some(mut v) = self.voice.take() {
            v.stop();
        }
        if let Some(mut c) = self.camera.take() {
            c.stop();
        }
        if let Some(mut s) = self.screen.take() {
            s.stop();
        }
        for handle in self.incoming_media.drain(..) {
            handle.abort();
        }
    }
}
impl Drop for MediaState {
    /// Ensure all media sessions and playback tasks are stopped on drop.
    fn drop(&mut self) {
        self.shutdown();
    }
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Check if PipeWire is available on this system.
/// Check if PipeWire is available on this system.
///
/// The original implementation relied solely on `catch_unwind` around
/// `pipewire::init()`. That is unreliable here: the release profile builds
/// with `panic = "abort"`, so a panic inside `init()` would abort the whole
/// process instead of being caught. We therefore first probe the daemon via
/// `pw-cli` (the media stack already shells out to `pw-dump` for device
/// enumeration), and only fall back to library initialization when the CLI
/// tooling is missing.
fn check_pipewire() -> bool {
    // `pw-cli info 0` succeeds only when a PipeWire daemon is reachable.
    if let Ok(status) = std::process::Command::new("pw-cli")
        .args(["info", "0"])
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
    {
        return status.success();
    }
    // Fallback: try to initialize the library. Only effective with
    // unwinding panics (debug builds); with `panic = "abort"` a failure
    // here aborts the process rather than returning `false`.
    std::panic::catch_unwind(|| {
        pipewire::init();
    })
    .is_ok()
}

331
src/media/voice.rs Normal file
View File

@@ -0,0 +1,331 @@
//! Voice capture and playback using PipeWire + Audiopus (via Songbird dependency).
//!
//! Architecture:
//! - Capture runs on a dedicated OS thread (PipeWire main loop).
//! - PipeWire process callback copies PCM → crossbeam channel.
//! - Async task reads from channel, encodes with Opus, sends over QUIC.
//! - Playback: receives Opus packets from QUIC, decodes, feeds to PipeWire output.
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::Arc;
use std::thread;
use anyhow::Result;
use crate::protocol::{write_framed, decode_framed, MediaStreamMessage};
use crate::media::WebMediaEvent;
// Use audiopus types directly
use audiopus::{coder::Encoder as OpusEncoder, coder::Decoder as OpusDecoder, Channels, Application, SampleRate, Bitrate};
// Audio constants shared by the Opus encode and decode paths.
/// Opus operating sample rate in Hz.
const SAMPLE_RATE_VAL: i32 = 48000;
/// Duration of one Opus frame in milliseconds.
const FRAME_SIZE_MS: u32 = 20; // 20ms
/// Samples per Opus frame: 960 at 48 kHz / 20 ms.
const FRAME_SIZE_SAMPLES: usize = (SAMPLE_RATE_VAL as usize * FRAME_SIZE_MS as usize) / 1000;
/// Represents an available audio device (source or sink),
/// as reported by `pw-dump`.
#[derive(Debug, Clone)]
pub struct AudioDevice {
    /// PipeWire node name (used as target.object)
    pub node_name: String,
    /// Human-readable description (node.description, node.nick, or the name)
    pub description: String,
}
/// List available audio input sources via `pw-dump`.
/// Includes `Audio/Duplex` nodes, which can act as sources.
pub fn list_audio_sources() -> Vec<AudioDevice> {
    list_audio_nodes("Audio/Source")
}
/// List available audio output sinks via `pw-dump`.
/// Includes `Audio/Duplex` nodes, which can act as sinks.
pub fn list_audio_sinks() -> Vec<AudioDevice> {
    list_audio_nodes("Audio/Sink")
}
/// Enumerate PipeWire nodes whose `media.class` matches `filter_class`.
///
/// Shells out to `pw-dump` and parses its JSON object dump. Any failure
/// (missing binary, non-zero exit, bad UTF-8, malformed JSON) yields an
/// empty list rather than an error. `Audio/Duplex` nodes always qualify
/// since they behave as both source and sink.
fn list_audio_nodes(filter_class: &str) -> Vec<AudioDevice> {
    use std::process::Command;
    // Run `pw-dump` and collect its stdout as a UTF-8 string.
    let dump = Command::new("pw-dump")
        .output()
        .ok()
        .filter(|out| out.status.success())
        .and_then(|out| String::from_utf8(out.stdout).ok());
    let json_str = match dump {
        Some(s) => s,
        None => return Vec::new(),
    };
    let objects: Vec<serde_json::Value> = match serde_json::from_str(&json_str) {
        Ok(parsed) => parsed,
        Err(_) => return Vec::new(),
    };
    objects
        .iter()
        // Only objects exposing info.props can describe an audio node.
        .filter_map(|obj| obj.get("info").and_then(|info| info.get("props")))
        .filter(|props| {
            let class = props.get("media.class").and_then(|v| v.as_str()).unwrap_or("");
            // Match partial class, e.g. "Audio/Source", "Audio/Sink", "Audio/Duplex"
            class.contains(filter_class) || class.contains("Audio/Duplex")
        })
        .filter_map(|props| {
            let node_name = props
                .get("node.name")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if node_name.is_empty() {
                return None;
            }
            let description = props
                .get("node.description")
                .or_else(|| props.get("node.nick"))
                .and_then(|v| v.as_str())
                .unwrap_or(&node_name)
                .to_string();
            Some(AudioDevice { node_name, description })
        })
        .collect()
}
/// Main voice chat coordination.
pub struct VoiceChat {
    /// Cleared by `stop()` to signal all sender tasks to exit.
    running: Arc<AtomicBool>,
    /// Native PipeWire capture thread; unused (None) in the web path.
    capture_thread: Option<thread::JoinHandle<()>>,
    /// Async network sender task handles.
    tasks: Vec<tokio::task::JoinHandle<()>>,
}
impl VoiceChat {
    /// Start voice chat session (Web Version).
    ///
    /// The browser handles capture/playback; this side handles the network:
    /// PCM samples arrive on `mic_rx` and are Opus-encoded and fanned out.
    ///
    /// Design note: `broadcast::Receiver` cannot be cloned (only the `Sender`
    /// can mint new receivers via `subscribe`), so instead of one task per
    /// peer we spawn a SINGLE sender task that encodes each frame once and
    /// writes it to every peer stream — which is also cheaper (encode once,
    /// send N times).
    pub fn start_web(
        net: crate::net::NetworkManager,
        peers: Vec<iroh::EndpointId>, // Multiple peers
        mic_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
        _broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
        mic_bitrate: Arc<AtomicU32>,
    ) -> Result<Self> {
        let running = Arc::new(AtomicBool::new(true));
        let mut tasks = Vec::new();
        let sender_running = running.clone();
        let net_clone = net.clone();
        let mic_bitrate_clone = mic_bitrate.clone();
        let sender_task = tokio::spawn(async move {
            if let Err(e) = run_opis_sender_web_multi(net_clone, peers, mic_rx, sender_running, mic_bitrate_clone).await {
                tracing::error!("Voice sender failed: {}", e);
            }
        });
        tasks.push(sender_task);
        Ok(Self {
            running,
            capture_thread: None,
            tasks,
        })
    }
    /// Stop voice chat: signal the flag, abort sender tasks, and join the
    /// capture thread if one exists (native path only).
    pub fn stop(&mut self) {
        self.running.store(false, Ordering::SeqCst);
        for task in &self.tasks {
            task.abort();
        }
        self.tasks.clear();
        if let Some(t) = self.capture_thread.take() {
            t.thread().unpark(); // Wake up if sleeping
            let _ = t.join();
        }
    }
    /// Handle incoming audio stream (Web Version).
    ///
    /// `message` must be the already-decoded `AudioStart`. Subsequent
    /// `AudioData` packets are Opus-decoded (48 kHz mono) and re-broadcast
    /// to the web UI as f32 PCM until `AudioStop` arrives or the stream
    /// closes.
    pub async fn handle_incoming_audio_web(
        from: iroh::EndpointId,
        message: MediaStreamMessage,
        mut recv: iroh::endpoint::RecvStream,
        broadcast_tx: tokio::sync::broadcast::Sender<WebMediaEvent>,
    ) -> Result<()> {
        // Initialize Opus decoder
        let mut decoder = OpusDecoder::new(SampleRate::Hz48000, Channels::Mono)
            .map_err(|e| anyhow::anyhow!("Failed to create Opus decoder: {:?}", e))?;
        // Process start message
        match message {
            MediaStreamMessage::AudioStart { .. } => {
                tracing::info!("Incoming voice stream started (web) from {}", from);
            }
            _ => anyhow::bail!("Expected AudioStart"),
        }
        // Reusable decode buffer sized for one 20ms frame.
        let mut decode_buf = vec![0f32; FRAME_SIZE_SAMPLES];
        loop {
            let msg: MediaStreamMessage = match decode_framed(&mut recv).await {
                Ok(m) => m,
                Err(_) => break, // EOF
            };
            match msg {
                MediaStreamMessage::AudioData { opus_data, .. } => {
                    // `false` = not a packet-loss concealment frame.
                    match decoder.decode_float(Some(&opus_data), &mut decode_buf, false) {
                        Ok(len) => {
                            let samples = decode_buf[..len].to_vec();
                            // Identify sender by a truncated peer id for the UI.
                            let short_id: String = format!("{}", from).chars().take(8).collect();
                            let _ = broadcast_tx.send(WebMediaEvent::Audio { peer_id: short_id, data: samples });
                        }
                        Err(e) => {
                            tracing::warn!("Opus decode error: {:?}", e);
                        }
                    }
                }
                MediaStreamMessage::AudioStop => {
                    tracing::info!("Peer stopped audio");
                    break;
                }
                _ => {}
            }
        }
        Ok(())
    }
}
// ---------------------------------------------------------------------------
// Opus sender — encodes PCM and sends over QUIC (Multi-Peer)
// ---------------------------------------------------------------------------
async fn run_opis_sender_web_multi(
network_manager: crate::net::NetworkManager,
peers: Vec<iroh::EndpointId>,
mut input_rx: tokio::sync::broadcast::Receiver<Vec<f32>>,
running: Arc<AtomicBool>,
mic_bitrate: Arc<AtomicU32>,
) -> Result<()> {
if peers.is_empty() {
return Ok(());
}
// Open streams to all peers
let mut streams = Vec::new();
for peer in peers {
match network_manager.open_media_stream(peer, crate::protocol::MediaKind::Voice).await {
Ok((mut send, _)) => {
if let Err(e) = write_framed(&mut send, &MediaStreamMessage::AudioStart {
channels: 1,
sample_rate: 48000,
frame_size_ms: FRAME_SIZE_MS as u8,
}).await {
tracing::error!("Failed to send start to {}: {}", peer, e);
continue;
}
streams.push(send);
}
Err(e) => {
tracing::warn!("Failed to open voice stream to {}: {}", peer, e);
}
}
}
if streams.is_empty() {
tracing::warn!("No peers connected for voice chat");
}
let mut encoder = OpusEncoder::new(SampleRate::Hz48000, Channels::Mono, Application::Voip)
.map_err(|e| anyhow::anyhow!("Failed to create Opus encoder: {:?}", e))?;
// Set initial bitrate
let mut current_bitrate = mic_bitrate.load(Ordering::Relaxed);
encoder.set_bitrate(Bitrate::BitsPerSecond(current_bitrate as i32))
.map_err(|e| anyhow::anyhow!("Failed to set bitrate: {:?}", e))?;
// Opus frame size: 20ms at 48kHz = 960 samples
let frame_size = FRAME_SIZE_SAMPLES;
let mut pcm_buffer: Vec<f32> = Vec::with_capacity(frame_size * 2);
let mut opus_buffer = vec![0u8; 1500]; // MTU-ish
while running.load(Ordering::Relaxed) {
// Check for bitrate change
let new_bitrate = mic_bitrate.load(Ordering::Relaxed);
if new_bitrate != current_bitrate {
if let Err(e) = encoder.set_bitrate(Bitrate::BitsPerSecond(new_bitrate as i32)) {
tracing::warn!("Failed to update bitrate to {}: {:?}", new_bitrate, e);
} else {
tracing::info!("Updated Opus bitrate to {} bps", new_bitrate);
current_bitrate = new_bitrate;
}
}
// Receive PCM from Web
match input_rx.recv().await {
Ok(samples) => {
pcm_buffer.extend_from_slice(&samples);
// Process 20ms chunks
while pcm_buffer.len() >= frame_size {
let chunk: Vec<f32> = pcm_buffer.drain(0..frame_size).collect();
match encoder.encode_float(&chunk, &mut opus_buffer) {
Ok(len) => {
let packet = opus_buffer[..len].to_vec();
let msg = MediaStreamMessage::AudioData {
sequence: 0, // Should maintain seq counter? Protocol doesn't have it?
// Need to check Protocol definition.
// Assuming I need to match variants.
opus_data: packet.clone(),
};
// Send to all streams
// remove closed streams
// This is async inside a loop which makes ownership tricky for Vec<SendStream>
// but we can iterate.
let mut keep_indices = Vec::new();
for (i, stream) in streams.iter_mut().enumerate() {
if let Err(e) = write_framed(stream, &msg).await {
tracing::error!("Failed to send audio packet: {}", e);
// mark for removal
} else {
keep_indices.push(i);
}
}
// Naive removal (rebuild vec) if needed, or just warn.
// Ideally remove bad streams.
}
Err(e) => {
tracing::error!("Opus encode error: {:?}", e);
}
}
}
}
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
// Skip if lagged
}
}
}
// Send Stop
for stream in &mut streams {
let _ = write_framed(stream, &MediaStreamMessage::AudioStop).await;
let _ = stream.finish(); // fire and forget
}
Ok(())
}

340
src/net/mod.rs Normal file
View File

@@ -0,0 +1,340 @@
//! Networking layer using iroh for QUIC connections and iroh-gossip for broadcast.
//!
//! This module manages:
//! - The iroh `Endpoint` and `Router`
//! - Gossip-based broadcast for chat messages
//! - Peer connection tracking
//! - File transfer stream handling
use std::collections::HashMap;
use std::sync::Arc;
use anyhow::{Context, Result};
// use iroh::protocol::Router;
use iroh::Endpoint;
pub use iroh::EndpointId;
use iroh_gossip::Gossip;
use iroh_gossip::TopicId;
use n0_future::StreamExt;
use tokio::sync::{mpsc, Mutex};
use crate::protocol::{self, GossipMessage};
/// ALPN identifier for file transfers (direct peer-to-peer streams).
pub const FILE_TRANSFER_ALPN: &[u8] = b"p2p-chat/file/1";
/// ALPN identifier for voice chat streams.
pub const VOICE_ALPN: &[u8] = b"p2p-chat/voice/1";
/// ALPN identifier for camera streams.
pub const CAMERA_ALPN: &[u8] = b"p2p-chat/camera/1";
/// ALPN identifier for screen sharing streams.
pub const SCREEN_ALPN: &[u8] = b"p2p-chat/screen/1";
/// Network events sent to the application layer.
#[derive(Debug)]
pub enum NetEvent {
    /// A gossip message was received from a peer.
    GossipReceived {
        /// The peer that delivered the message (not necessarily its author).
        from: EndpointId,
        message: GossipMessage,
    },
    /// A peer came online (joined the gossip swarm).
    PeerUp(EndpointId),
    /// A peer went offline (left the gossip swarm).
    PeerDown(EndpointId),
    /// An incoming file transfer stream was accepted.
    IncomingFileStream {
        from: EndpointId,
        #[allow(dead_code)]
        send: iroh::endpoint::SendStream,
        #[allow(dead_code)]
        recv: iroh::endpoint::RecvStream,
    },
    /// An incoming media stream was accepted; `kind` is derived from the ALPN.
    IncomingMediaStream {
        from: EndpointId,
        kind: protocol::MediaKind,
        send: iroh::endpoint::SendStream,
        recv: iroh::endpoint::RecvStream,
    },
}
/// Information about a connected peer.
#[derive(Debug, Clone)]
pub struct PeerInfo {
    pub id: EndpointId,
    /// Display name, if the peer has announced one.
    pub name: Option<String>,
    /// Feature capabilities advertised by the peer, if received.
    pub capabilities: Option<protocol::CapabilitiesMessage>,
    /// True when this entry describes the local endpoint.
    pub is_self: bool,
}
/// Manages the iroh networking stack.
///
/// Cheap to clone: all fields are handles or `Arc`-backed shared state.
#[derive(Clone)]
pub struct NetworkManager {
    pub endpoint: Endpoint,
    // We handle routing manually now
    // router: Option<Router>,
    pub gossip: Gossip,
    /// The single gossip topic this chat operates on.
    topic: TopicId,
    /// Set after `join_gossip`; `None` until then (broadcast is a no-op).
    gossip_sender: Option<iroh_gossip::api::GossipSender>,
    /// Currently known peers keyed by endpoint id.
    pub peers: Arc<Mutex<HashMap<EndpointId, PeerInfo>>>,
    pub our_id: EndpointId,
}
impl NetworkManager {
    /// Connect to a peer by ID.
    ///
    /// Establishes the transport connection, then subscribes to the topic
    /// with the peer as bootstrap so the gossip actor learns about it.
    pub async fn connect(&self, peer_id: EndpointId) -> Result<()> {
        // 1. Establish transport connection
        self.endpoint
            .connect(peer_id, iroh_gossip::ALPN)
            .await
            .context("Failed to connect to peer")?;
        // 2. Add peer to gossip by subscribing with it as a bootstrap peer.
        // We spawn a task to drain the new receiver so the subscription stays active.
        // This ensures the gossip actor knows about this peer.
        let (_sender, mut receiver) = self
            .gossip
            .subscribe(self.topic, vec![peer_id])
            .await
            .context("Failed to subscribe with new peer")?
            .split();
        tokio::spawn(async move {
            while let Some(_event) = receiver.next().await {
                // Drain events to keep subscription active.
                // We ignore them because the main subscription loop triggers
                // the actual application logic (NetEvent).
            }
        });
        Ok(())
    }
    /// Create a new NetworkManager and start the iroh endpoint.
    ///
    /// Binds the endpoint with all application ALPNs plus gossip, spawns
    /// the unified connection acceptor, and returns the manager together
    /// with both ends of the `NetEvent` channel (capacity 256).
    pub async fn new(topic_bytes: [u8; 32]) -> Result<(Self, mpsc::Sender<NetEvent>, mpsc::Receiver<NetEvent>)> {
        let (event_tx, event_rx) = mpsc::channel(256);
        // Create endpoint with file transfer ALPN and Gossip ALPN
        let endpoint = Endpoint::builder()
            .alpns(vec![
                FILE_TRANSFER_ALPN.to_vec(),
                VOICE_ALPN.to_vec(),
                CAMERA_ALPN.to_vec(),
                SCREEN_ALPN.to_vec(),
                iroh_gossip::ALPN.to_vec(),
            ])
            .bind()
            .await
            .context("Failed to bind iroh endpoint")?;
        let our_id = endpoint.id();
        let topic = TopicId::from_bytes(topic_bytes);
        // Build gossip protocol
        let gossip = Gossip::builder().spawn(endpoint.clone());
        let mgr = Self {
            endpoint,
            gossip: gossip.clone(),
            // router: None,
            topic,
            gossip_sender: None,
            peers: Arc::new(Mutex::new(HashMap::new())),
            our_id,
        };
        // Spawn unified connection acceptor
        Self::spawn_acceptor(mgr.endpoint.clone(), gossip, event_tx.clone());
        Ok((mgr, event_tx, event_rx))
    }
    /// Subscribe to the gossip topic and start receiving messages.
    /// `bootstrap_peers` are the initial peers to connect to.
    ///
    /// Stores the topic sender (enabling `broadcast`) and spawns a task
    /// that translates gossip events into `NetEvent`s: received messages
    /// are postcard-decoded, and neighbor up/down events maintain the
    /// shared `peers` map.
    pub async fn join_gossip(
        &mut self,
        bootstrap_peers: Vec<EndpointId>,
        event_tx: mpsc::Sender<NetEvent>,
    ) -> Result<()> {
        let (sender, mut receiver) = self
            .gossip
            .subscribe(self.topic, bootstrap_peers)
            .await
            .context("Failed to subscribe to gossip topic")?
            .split();
        self.gossip_sender = Some(sender);
        let peers = self.peers.clone();
        // Spawn gossip event receiver
        tokio::spawn(async move {
            while let Some(event) = receiver.next().await {
                match event {
                    Ok(iroh_gossip::api::Event::Received(msg)) => {
                        // Deserialize the gossip message
                        match postcard::from_bytes::<GossipMessage>(&msg.content) {
                            Ok(gossip_msg) => {
                                let _ = event_tx
                                    .send(NetEvent::GossipReceived {
                                        from: msg.delivered_from,
                                        message: gossip_msg,
                                    })
                                    .await;
                            }
                            Err(e) => {
                                tracing::warn!("Failed to deserialize gossip message: {}", e);
                            }
                        }
                    }
                    Ok(iroh_gossip::api::Event::NeighborUp(peer_id)) => {
                        // Insert a placeholder entry; name/capabilities are
                        // filled in later when the peer announces them.
                        {
                            let mut p = peers.lock().await;
                            p.entry(peer_id).or_insert_with(|| PeerInfo {
                                id: peer_id,
                                name: None,
                                capabilities: None,
                                is_self: false,
                            });
                        }
                        let _ = event_tx.send(NetEvent::PeerUp(peer_id)).await;
                    }
                    Ok(iroh_gossip::api::Event::NeighborDown(peer_id)) => {
                        {
                            let mut p = peers.lock().await;
                            p.remove(&peer_id);
                        }
                        let _ = event_tx.send(NetEvent::PeerDown(peer_id)).await;
                    }
                    Ok(_) => {}
                    Err(e) => {
                        // A receiver error ends the subscription loop.
                        tracing::error!("Gossip receiver error: {}", e);
                        break;
                    }
                }
            }
        });
        Ok(())
    }
/// Broadcast a gossip message to all peers in the topic.
pub async fn broadcast(&self, msg: &GossipMessage) -> Result<()> {
if let Some(sender) = &self.gossip_sender {
let data = postcard::to_allocvec(msg)?;
sender.broadcast(data.into()).await?;
}
Ok(())
}
    /// Get our endpoint address for sharing with peers
    /// (includes the endpoint id and current direct/relay addresses).
    #[allow(dead_code)]
    pub fn our_addr(&self) -> iroh::EndpointAddr {
        self.endpoint.addr()
    }
/// Open a bi-directional stream to a specific peer for file transfer.
#[allow(dead_code)]
pub async fn open_file_stream(
&self,
peer_id: EndpointId,
) -> Result<(iroh::endpoint::SendStream, iroh::endpoint::RecvStream)> {
let conn = self
.endpoint
.connect(peer_id, FILE_TRANSFER_ALPN)
.await
.context("Failed to connect for file transfer")?;
let (send, recv) = conn.open_bi().await?;
Ok((send, recv))
}
/// Open a bi-directional stream to a specific peer for media.
pub async fn open_media_stream(
&self,
peer_id: EndpointId,
kind: protocol::MediaKind,
) -> Result<(iroh::endpoint::SendStream, iroh::endpoint::RecvStream)> {
let alpn = match kind {
protocol::MediaKind::Voice => VOICE_ALPN,
protocol::MediaKind::Camera => CAMERA_ALPN,
protocol::MediaKind::Screen => SCREEN_ALPN,
};
let conn = self
.endpoint
.connect(peer_id, alpn)
.await
.context("Failed to connect for media stream")?;
let (send, recv) = conn.open_bi().await?;
Ok((send, recv))
}
    /// Spawn a task that accepts incoming connections and dispatches them by ALPN.
    ///
    /// Gossip connections are handed straight to `gossip`; file/media
    /// connections have their first bi-directional stream accepted and are
    /// surfaced to the application through `event_tx`. Each connection is
    /// served on its own task so a slow handshake cannot stall the accept
    /// loop.
    fn spawn_acceptor(endpoint: Endpoint, gossip: Gossip, event_tx: mpsc::Sender<NetEvent>) {
        tokio::spawn(async move {
            while let Some(incoming) = endpoint.accept().await {
                let gossip = gossip.clone();
                let event_tx = event_tx.clone();
                tokio::spawn(async move {
                    match incoming.await {
                        Ok(conn) => {
                            let alpn = conn.alpn().to_vec();
                            let peer_id = conn.remote_id();
                            if alpn == iroh_gossip::ALPN {
                                // Dispatch to gossip
                                if let Err(e) = gossip.handle_connection(conn).await {
                                    tracing::warn!("Gossip failed to handle connection: {}", e);
                                }
                            } else if alpn == FILE_TRANSFER_ALPN || alpn == VOICE_ALPN || alpn == CAMERA_ALPN || alpn == SCREEN_ALPN {
                                // Handle application protocols
                                match conn.accept_bi().await {
                                    Ok((send, recv)) => {
                                        let event = if alpn == FILE_TRANSFER_ALPN {
                                            NetEvent::IncomingFileStream {
                                                from: peer_id,
                                                send,
                                                recv,
                                            }
                                        } else {
                                            // Map the media ALPN back to its kind
                                            // (mirror of open_media_stream).
                                            let kind = if alpn == VOICE_ALPN {
                                                protocol::MediaKind::Voice
                                            } else if alpn == CAMERA_ALPN {
                                                protocol::MediaKind::Camera
                                            } else {
                                                protocol::MediaKind::Screen
                                            };
                                            NetEvent::IncomingMediaStream {
                                                from: peer_id,
                                                kind,
                                                send,
                                                recv,
                                            }
                                        };
                                        let _ = event_tx.send(event).await;
                                    }
                                    Err(e) => {
                                        tracing::warn!("Failed to accept bi stream: {}", e);
                                    }
                                }
                            } else {
                                tracing::warn!("Ignoring connection with unknown ALPN: {:?}", String::from_utf8_lossy(&alpn));
                            }
                        }
                        Err(e) => {
                            tracing::warn!("Failed to establish connection: {}", e);
                        }
                    }
                });
            }
        });
    }
    /// Graceful shutdown.
    ///
    /// Consumes the manager and closes the QUIC endpoint, tearing down all
    /// active connections.
    pub async fn shutdown(self) -> Result<()> {
        self.endpoint.close().await;
        Ok(())
    }
}

225
src/protocol/mod.rs Normal file
View File

@@ -0,0 +1,225 @@
//! Protocol message types for the P2P chat application.
//!
//! All messages are serialized with postcard and transmitted either via
//! iroh-gossip broadcast (chat, capabilities) or via dedicated QUIC streams (files).
use serde::{Deserialize, Serialize};
/// Unique identifier for a file transfer.
pub type FileId = [u8; 16];
/// Generate a random file ID.
///
/// Uses a v4 (random) UUID as the entropy source, so accidental
/// collisions are statistically negligible.
pub fn new_file_id() -> FileId {
    uuid::Uuid::new_v4().into_bytes()
}
// ---------------------------------------------------------------------------
// Gossip messages (broadcast to all peers)
// ---------------------------------------------------------------------------
/// Top-level envelope for messages sent over the gossip broadcast channel.
///
/// Serialized with postcard, which encodes the variant index on the wire:
/// new variants must be appended at the end, never inserted, to stay
/// compatible with older peers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum GossipMessage {
    Chat(ChatMessage),
    Capabilities(CapabilitiesMessage),
    PeerAnnounce(PeerAnnounce),
    FileOfferBroadcast(FileOfferBroadcast),
    NameChange(NameChange),
    /// Graceful disconnect notification.
    Disconnect { sender_name: String },
}
/// A name change notification from a peer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NameChange {
    /// Display name the peer was previously using.
    pub old_name: String,
    /// Display name the peer is switching to.
    pub new_name: String,
}
/// A chat message from a peer.
///
/// Broadcast via gossip inside `GossipMessage::Chat`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    /// Display name of the sender.
    pub sender_name: String,
    /// Unix timestamp in milliseconds.
    pub timestamp: u64,
    /// UTF-8 message text.
    pub text: String,
}
/// Capabilities advertised by a peer.
///
/// Broadcast over gossip so other peers know which features this node
/// supports before opening dedicated streams to it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CapabilitiesMessage {
    pub sender_name: String,
    /// Can receive chat messages.
    pub chat: bool,
    /// Can receive file transfers.
    pub files: bool,
    /// Can participate in voice.
    pub audio: bool,
    /// Can participate in camera video.
    pub camera: bool,
    /// Can participate in screen sharing.
    pub screen: bool,
}
impl Default for CapabilitiesMessage {
fn default() -> Self {
Self {
sender_name: String::new(),
chat: true,
files: true,
audio: false,
camera: false,
screen: false,
}
}
}
/// Announce a peer joining the swarm.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeerAnnounce {
    /// Display name of the joining peer.
    pub sender_name: String,
}
/// Broadcast a file offer so the recipient can see it in chat.
/// Actual transfer happens over a dedicated stream.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileOfferBroadcast {
    /// Display name of the offering peer.
    pub sender_name: String,
    /// Identifier correlating this offer with the dedicated transfer stream.
    pub file_id: FileId,
    pub file_name: String,
    pub file_size: u64,
}
// ---------------------------------------------------------------------------
// Stream messages (point-to-point over dedicated QUIC bi-streams)
// ---------------------------------------------------------------------------
/// Messages sent over a dedicated file transfer QUIC stream.
///
/// The variants suggest the flow Offer → Accept/Reject → Chunk* → Done;
/// NOTE(review): confirm the exact sequencing against the transfer
/// implementation, which is not part of this module.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub enum FileStreamMessage {
    Offer(FileOffer),
    Accept(FileAcceptReject),
    Reject(FileAcceptReject),
    Chunk(FileChunk),
    Done(FileDone),
}
/// File offer sent at the start of a file transfer stream.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileOffer {
    pub file_id: FileId,
    pub name: String,
    pub size: u64,
    /// 32-byte digest of the file contents (SHA-256 sized; confirm the
    /// hashing code elsewhere in the crate).
    pub checksum: [u8; 32],
}
/// Accept or reject a file offer.
///
/// Shared payload for both `FileStreamMessage::Accept` and `::Reject`;
/// the meaning is carried by the enclosing variant.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct FileAcceptReject {
    pub file_id: FileId,
}
/// A chunk of file data.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct FileChunk {
    pub file_id: FileId,
    /// Byte offset of this chunk within the file.
    pub offset: u64,
    pub data: Vec<u8>,
}
/// Signal that a file transfer is complete.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct FileDone {
    pub file_id: FileId,
}
// ---------------------------------------------------------------------------
// Media stream messages (point-to-point over dedicated QUIC bi-streams)
// ---------------------------------------------------------------------------
/// Which kind of media a stream carries.
///
/// Also selects the ALPN used when opening/accepting the dedicated media
/// stream (see the network manager's `open_media_stream`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum MediaKind {
    Voice,
    Camera,
    Screen,
}
/// Messages sent over a dedicated media QUIC stream.
///
/// Audio and video each use a start/data/stop sequence; `sequence`
/// numbers increase per frame (presumably for loss/reorder detection —
/// confirm against the media implementation).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MediaStreamMessage {
    /// Audio session start — tells the receiver the format.
    AudioStart {
        sample_rate: u32,
        channels: u8,
        frame_size_ms: u8,
    },
    /// One Opus-encoded audio frame.
    AudioData {
        sequence: u64,
        opus_data: Vec<u8>,
    },
    /// Audio session ended.
    AudioStop,
    /// Video session start (camera or screen).
    VideoStart {
        kind: MediaKind,
        width: u32,
        height: u32,
        fps: u8,
    },
    /// A video frame (MPEG-TS chunk).
    VideoFrame {
        sequence: u64,
        timestamp_ms: u64,
        data: Vec<u8>,
    },
    /// Video session ended.
    VideoStop {
        kind: MediaKind,
    },
}
// ---------------------------------------------------------------------------
// Serialization helpers (length-delimited framing)
// ---------------------------------------------------------------------------
/// Serialize a message to bytes with a 4-byte big-endian length prefix.
///
/// Frame layout is `[len: u32 BE][postcard payload]`, matching what
/// `decode_framed` expects on the receiving side.
///
/// # Errors
/// Fails if serialization fails or if the payload exceeds the 64 MiB cap
/// enforced by `decode_framed`. The cap also guards the `as u32` cast:
/// without it a payload >= 4 GiB would silently wrap and emit a corrupt
/// frame.
#[allow(dead_code)]
pub fn encode_framed<T: Serialize>(msg: &T) -> anyhow::Result<Vec<u8>> {
    let payload = postcard::to_allocvec(msg)?;
    if payload.len() > 64 * 1024 * 1024 {
        anyhow::bail!("Message too large: {} bytes", payload.len());
    }
    let len = payload.len() as u32;
    let mut buf = Vec::with_capacity(4 + payload.len());
    buf.extend_from_slice(&len.to_be_bytes());
    buf.extend_from_slice(&payload);
    Ok(buf)
}
/// Read a length-delimited framed message from an async QUIC recv stream.
///
/// Reads the 4-byte big-endian length prefix, then exactly that many
/// payload bytes, and deserializes the payload with postcard. Frames
/// larger than 64 MiB are rejected to bound memory use.
pub async fn decode_framed<T: for<'de> Deserialize<'de>>(
    recv: &mut iroh::endpoint::RecvStream,
) -> anyhow::Result<T> {
    const MAX_FRAME: usize = 64 * 1024 * 1024;
    let mut prefix = [0u8; 4];
    recv.read_exact(&mut prefix).await?;
    let frame_len = u32::from_be_bytes(prefix) as usize;
    if frame_len > MAX_FRAME {
        anyhow::bail!("Message too large: {} bytes", frame_len);
    }
    let mut frame = vec![0u8; frame_len];
    recv.read_exact(&mut frame).await?;
    Ok(postcard::from_bytes(&frame)?)
}
/// Write a length-delimited framed message to an async QUIC send stream.
///
/// Counterpart of `decode_framed`: frames the postcard payload with a
/// 4-byte big-endian length prefix before writing it out.
pub async fn write_framed<T: Serialize>(
    send: &mut iroh::endpoint::SendStream,
    msg: &T,
) -> anyhow::Result<()> {
    let frame = encode_framed(msg)?;
    send.write_all(&frame).await?;
    Ok(())
}

88
src/tui/chat_panel.rs Normal file
View File

@@ -0,0 +1,88 @@
//! Chat panel widget — scrollable chat history.
use ratatui::layout::Rect;
use ratatui::style::{Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use ratatui::Frame;
use crate::chat::{ChatEntry, ChatState};
use crate::tui::App;
/// Render the scrollable chat history panel.
///
/// `app.scroll_offset` counts how many messages the user has scrolled up
/// from the bottom; 0 means "pinned to the newest message".
pub fn render(frame: &mut Frame, area: Rect, chat: &ChatState, app: &App) {
    let block = Block::default()
        .title(" 💬 Chat ")
        .borders(Borders::ALL)
        .border_style(Style::default().fg(app.theme.border));
    let inner = block.inner(area);
    frame.render_widget(block, area);
    if chat.history.is_empty() {
        let empty = ratatui::widgets::Paragraph::new("No messages yet. Start typing!")
            .style(Style::default().fg(app.theme.time));
        frame.render_widget(empty, inner);
        return;
    }
    let visible_height = inner.height as usize;
    let total = chat.history.len();
    // Window [start..end] of history to show. Clamp so that over-scrolling
    // pins the view at the oldest messages instead of snapping back to the
    // newest (the previous behavior when scroll_offset >= total).
    let end = total
        .saturating_sub(app.scroll_offset)
        .max(visible_height.min(total));
    let start = end.saturating_sub(visible_height);
    let items: Vec<ListItem> = chat.history[start..end]
        .iter()
        .map(|entry| format_entry(entry, app))
        .collect();
    frame.render_widget(List::new(items), inner);
}
/// Build one chat line: `[HH:MM] name: text`, or an italic
/// `*** text ***` for system messages.
fn format_entry(entry: &ChatEntry, app: &App) -> ListItem<'static> {
    let time = chrono::DateTime::from_timestamp_millis(entry.timestamp as i64)
        .map(|dt| dt.format("%H:%M").to_string())
        .unwrap_or_default();
    let time_span = Span::styled(format!("[{}] ", time), Style::default().fg(app.theme.time));
    if entry.is_system {
        let body = Span::styled(
            format!("*** {} ***", entry.text),
            Style::default()
                .fg(app.theme.system_msg)
                .add_modifier(Modifier::ITALIC),
        );
        return ListItem::new(Line::from(vec![time_span, body]));
    }
    // Our own messages get a distinct name color.
    let name_color = if entry.is_self {
        app.theme.self_name
    } else {
        app.theme.peer_name
    };
    let name_span = Span::styled(
        format!("{}: ", entry.sender_name),
        Style::default().fg(name_color).add_modifier(Modifier::BOLD),
    );
    let text_span = Span::styled(entry.text.clone(), Style::default().fg(app.theme.text));
    ListItem::new(Line::from(vec![time_span, name_span, text_span]))
}

50
src/tui/file_panel.rs Normal file
View File

@@ -0,0 +1,50 @@
//! File transfer panel widget.
use ratatui::layout::Rect;
use ratatui::style::{Color, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use ratatui::Frame;
use crate::file_transfer::{FileTransferManager, TransferState};
use crate::tui::App;
/// Render the file transfer panel: one colored line per active transfer.
pub fn render(frame: &mut Frame, area: Rect, file_mgr: &FileTransferManager, app: &App) {
    let transfers = file_mgr.active_transfers();
    let block = Block::default()
        .title(format!(" 📦 Transfers ({}) ", transfers.len()))
        .borders(Borders::ALL)
        .border_style(Style::default().fg(app.theme.border));
    let inner = block.inner(area);
    frame.render_widget(block, area);
    if transfers.is_empty() {
        let placeholder = ratatui::widgets::Paragraph::new("No active transfers")
            .style(Style::default().fg(app.theme.time));
        frame.render_widget(placeholder, inner);
        return;
    }
    // State colors are intentionally NOT themed: green/red/cyan/yellow carry
    // fixed success/error/progress semantics regardless of the active theme.
    let rows: Vec<ListItem> = transfers
        .iter()
        .map(|info| {
            let state_color = match &info.state {
                TransferState::Complete => Color::Green,
                TransferState::Rejected | TransferState::Failed(_) => Color::Red,
                TransferState::Transferring { .. } => Color::Cyan,
                TransferState::Offering => Color::Yellow,
            };
            let label = FileTransferManager::format_progress(info);
            ListItem::new(Line::from(Span::styled(
                label,
                Style::default().fg(state_color),
            )))
        })
        .collect();
    frame.render_widget(List::new(rows), inner);
}

64
src/tui/input.rs Normal file
View File

@@ -0,0 +1,64 @@
//! Input bar widget with cursor.
use ratatui::layout::Rect;
use ratatui::style::Style;
use ratatui::widgets::{Block, Borders, Paragraph};
use ratatui::Frame;
use super::{App, InputMode};
/// Render the single-line input bar, with a title/border reflecting the
/// current input mode, and position the terminal cursor when a text field
/// is active.
pub fn render(frame: &mut Frame, area: Rect, app: &App) {
    let (title, border_color, content) = match app.input_mode {
        InputMode::FilePrompt => (
            " 📁 File path (Enter to send, Esc to cancel) ",
            app.theme.system_msg,
            app.file_path_input.as_str(),
        ),
        InputMode::Editing => (" ✏ Message (Esc for commands) ", app.theme.self_name, app.input.as_str()),
        InputMode::Normal => (
            " Press i/Enter to type, Q=quit, or use /help ",
            app.theme.time,
            app.input.as_str(),
        ),
        InputMode::MicSelect => (
            " 🎤 Selecting microphone... ",
            app.theme.border,
            "",
        ),
        InputMode::SpeakerSelect => (
            " 🔊 Selecting speaker... ",
            app.theme.border,
            "",
        ),
    };
    let block = Block::default()
        .title(title)
        .borders(Borders::ALL)
        .border_style(Style::default().fg(border_color));
    let paragraph = Paragraph::new(content.to_string())
        .style(Style::default().fg(app.theme.text))
        .block(block);
    frame.render_widget(paragraph, area);
    // Place the terminal cursor after the character the user is editing.
    // Columns are computed in characters, not bytes, so multibyte input no
    // longer pushes the cursor too far right. If `cursor_position` is not a
    // char boundary we fall back to the raw value rather than panic.
    // NOTE(review): character count still differs from display width for
    // double-width glyphs (CJK, emoji) — confirm whether that matters here.
    match app.input_mode {
        InputMode::Editing => {
            let col = app
                .input
                .get(..app.cursor_position)
                .map(|s| s.chars().count())
                .unwrap_or(app.cursor_position) as u16;
            frame.set_cursor_position((area.x + 1 + col, area.y + 1));
        }
        InputMode::FilePrompt => {
            let col = app.file_path_input.chars().count() as u16;
            frame.set_cursor_position((area.x + 1 + col, area.y + 1));
        }
        InputMode::Normal | InputMode::MicSelect | InputMode::SpeakerSelect => {}
    }
}

493
src/tui/mod.rs Normal file
View File

@@ -0,0 +1,493 @@
//! TUI module — terminal user interface using ratatui + crossterm.
pub mod chat_panel;
pub mod peer_panel;
pub mod input;
pub mod status_bar;
pub mod file_panel;
use std::path::PathBuf;
use crossterm::event::{KeyCode, KeyEvent};
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Clear, List, ListItem};
use ratatui::Frame;
use crate::chat::ChatState;
use crate::file_transfer::FileTransferManager;
use crate::media::MediaState;
use crate::media::voice::AudioDevice;
use crate::net::PeerInfo;
/// Input mode for the TUI.
#[derive(Debug, Clone, PartialEq)]
pub enum InputMode {
    /// Command/navigation mode: single keys act as shortcuts.
    Normal,
    /// Typing into the message input bar (also parses `/commands`).
    Editing,
    /// Prompting for a file path to send.
    #[allow(dead_code)]
    FilePrompt,
    /// Microphone picker overlay is open.
    MicSelect,
    /// Speaker picker overlay is open.
    SpeakerSelect,
}
/// Commands produced by TUI event handling.
///
/// Key handlers translate raw key events into these values; the main
/// loop is expected to execute them.
#[derive(Debug)]
pub enum TuiCommand {
    /// Send the typed chat message.
    SendMessage(String),
    /// Local-only system message (not broadcast to peers).
    SystemMessage(String),
    /// Offer the file at this path for transfer.
    SendFile(PathBuf),
    /// Change our display name (`/nick`).
    ChangeNick(String),
    /// Connect to a peer by id string (`/connect`).
    Connect(String),
    ToggleVoice,
    ToggleCamera,
    ToggleScreen,
    SelectMic(String), // node_name of selected mic
    SelectSpeaker(String), // node_name of selected speaker
    /// Audio bitrate in bits per second (`/bitrate` takes kbps).
    SetBitrate(u32),
    Leave,
    Quit,
    /// Key was consumed; nothing for the main loop to do.
    None,
}
use crate::config::Theme;
// ... imports ...
/// Application state for the TUI.
pub struct App {
    /// Current contents of the message input bar.
    pub input: String,
    /// Active input mode; drives both key handling and rendering.
    pub input_mode: InputMode,
    /// Cursor position within `input`.
    // NOTE(review): maintained by per-keypress arithmetic in
    // `handle_editing_key`; confirm byte-vs-character semantics for
    // multibyte input.
    pub cursor_position: usize,
    /// Messages scrolled up from the bottom of the chat history.
    pub scroll_offset: usize,
    #[allow(dead_code)]
    pub show_file_panel: bool,
    /// Buffer for the file-path prompt.
    pub file_path_input: String,
    /// Active color theme.
    pub theme: Theme,
    // Device selection state (reused for Mic and Speaker)
    pub audio_devices: Vec<AudioDevice>,
    pub device_selected_index: usize,
}
impl App {
    /// Create the TUI state with the given color theme.
    ///
    /// Starts in `Editing` mode so the user can type a message immediately.
    pub fn new(theme: Theme) -> Self {
        Self {
            input: String::new(),
            input_mode: InputMode::Editing,
            cursor_position: 0,
            scroll_offset: 0,
            show_file_panel: true,
            file_path_input: String::new(),
            theme,
            audio_devices: Vec::new(),
            device_selected_index: 0,
        }
    }
    /// Open the mic selection screen.
    pub fn open_mic_select(&mut self, sources: Vec<AudioDevice>) {
        self.audio_devices = sources;
        self.device_selected_index = 0;
        self.input_mode = InputMode::MicSelect;
    }
    /// Open the speaker selection screen.
    pub fn open_speaker_select(&mut self, sinks: Vec<AudioDevice>) {
        self.audio_devices = sinks;
        self.device_selected_index = 0;
        self.input_mode = InputMode::SpeakerSelect;
    }
    /// Handle a key event and return a command for the main loop.
    pub fn handle_key(&mut self, key: KeyEvent) -> TuiCommand {
        match self.input_mode {
            InputMode::MicSelect | InputMode::SpeakerSelect => self.handle_device_select_key(key),
            InputMode::FilePrompt => self.handle_file_prompt_key(key),
            InputMode::Editing => self.handle_editing_key(key),
            InputMode::Normal => self.handle_normal_key(key),
        }
    }
    /// Byte index of the start of the character preceding `cursor_position`.
    ///
    /// `cursor_position` is a byte offset into `input` kept on a UTF-8
    /// character boundary; moving it by a raw `+/- 1` would land inside a
    /// multibyte character and make `String::insert`/`remove` panic.
    fn prev_char_boundary(&self) -> usize {
        self.input[..self.cursor_position]
            .char_indices()
            .next_back()
            .map(|(i, _)| i)
            .unwrap_or(0)
    }
    /// Key handling for `Editing` mode: text entry plus `/command`
    /// parsing on Enter.
    fn handle_editing_key(&mut self, key: KeyEvent) -> TuiCommand {
        match key.code {
            KeyCode::Enter => {
                if !self.input.is_empty() {
                    let text: String = self.input.drain(..).collect();
                    self.cursor_position = 0;
                    // Parse slash commands
                    if let Some(cmd) = text.strip_prefix('/') {
                        // Split into command word + optional single argument.
                        let parts: Vec<&str> = cmd.splitn(2, ' ').collect();
                        match parts[0] {
                            "nick" | "name" => {
                                let new_name = parts.get(1).unwrap_or(&"").trim();
                                if new_name.is_empty() {
                                    return TuiCommand::SystemMessage(
                                        "Usage: /nick <new_name>".to_string(),
                                    );
                                }
                                return TuiCommand::ChangeNick(new_name.to_string());
                            }
                            "connect" | "join" => {
                                let peer_id = parts.get(1).unwrap_or(&"").trim();
                                if peer_id.is_empty() {
                                    return TuiCommand::SystemMessage(
                                        "Usage: /connect <peer_id>".to_string(),
                                    );
                                }
                                return TuiCommand::Connect(peer_id.to_string());
                            }
                            "voice" => return TuiCommand::ToggleVoice,
                            "mic" | "microphone" => {
                                // Open mic selection screen
                                let sources = crate::media::voice::list_audio_sources();
                                if sources.is_empty() {
                                    return TuiCommand::SystemMessage(
                                        "🎤 No audio sources found (is PipeWire running?)".to_string(),
                                    );
                                }
                                self.open_mic_select(sources);
                                return TuiCommand::None;
                            }
                            "speaker" | "output" => {
                                // Open speaker selection screen
                                let sinks = crate::media::voice::list_audio_sinks();
                                if sinks.is_empty() {
                                    return TuiCommand::SystemMessage(
                                        "🔊 No audio outputs found (is PipeWire running?)".to_string(),
                                    );
                                }
                                self.open_speaker_select(sinks);
                                return TuiCommand::None;
                            }
                            "camera" | "cam" => return TuiCommand::ToggleCamera,
                            "screen" | "share" => return TuiCommand::ToggleScreen,
                            "file" | "send" => {
                                let path = parts.get(1).unwrap_or(&"").trim();
                                if path.is_empty() {
                                    // No path given: try the native file dialogs
                                    // (zenity, then kdialog) before giving up.
                                    use std::process::Command;
                                    let result = Command::new("zenity")
                                        .args(["--file-selection", "--title=Select file to send"])
                                        .output()
                                        .or_else(|_| {
                                            Command::new("kdialog")
                                                .args(["--getopenfilename", "."])
                                                .output()
                                        });
                                    match result {
                                        Ok(output) if output.status.success() => {
                                            let chosen = String::from_utf8_lossy(&output.stdout)
                                                .trim()
                                                .to_string();
                                            if !chosen.is_empty() {
                                                return TuiCommand::SendFile(PathBuf::from(chosen));
                                            }
                                            return TuiCommand::None; // cancelled
                                        }
                                        _ => {
                                            return TuiCommand::SystemMessage(
                                                "No file dialog available. Use: /file <path>".to_string(),
                                            );
                                        }
                                    }
                                }
                                return TuiCommand::SendFile(PathBuf::from(path));
                            }
                            "quit" | "q" => return TuiCommand::Quit,
                            "leave" => return TuiCommand::Leave,
                            "help" => {
                                return TuiCommand::SystemMessage(
                                    "Commands: /nick <name>, /connect <id>, /voice, /bitrate <kbps>, /mic, /camera, /screen, /file <path>, /leave, /quit".to_string(),
                                );
                            }
                            "bitrate" => {
                                let kbps_str = parts.get(1).unwrap_or(&"").trim();
                                if let Ok(kbps) = kbps_str.parse::<u32>() {
                                    // User enters kbps; the command carries bps.
                                    return TuiCommand::SetBitrate(kbps * 1000);
                                } else {
                                    return TuiCommand::SystemMessage("Usage: /bitrate <kbps> (e.g. 128)".to_string());
                                }
                            }
                            _ => {
                                return TuiCommand::SystemMessage(
                                    format!("Unknown command: /{}. Type /help", parts[0]),
                                );
                            }
                        }
                    }
                    return TuiCommand::SendMessage(text);
                }
                TuiCommand::None
            }
            KeyCode::Char(c) => {
                self.input.insert(self.cursor_position, c);
                // Advance by the character's encoded length: `+= 1` would
                // leave the cursor mid-character after multibyte input and
                // panic on the next edit.
                self.cursor_position += c.len_utf8();
                TuiCommand::None
            }
            KeyCode::Backspace => {
                if self.cursor_position > 0 {
                    // Remove the whole character before the cursor.
                    let prev = self.prev_char_boundary();
                    self.input.remove(prev);
                    self.cursor_position = prev;
                }
                TuiCommand::None
            }
            KeyCode::Delete => {
                if self.cursor_position < self.input.len() {
                    self.input.remove(self.cursor_position);
                }
                TuiCommand::None
            }
            KeyCode::Left => {
                if self.cursor_position > 0 {
                    self.cursor_position = self.prev_char_boundary();
                }
                TuiCommand::None
            }
            KeyCode::Right => {
                if self.cursor_position < self.input.len() {
                    // Step over the whole character starting at the cursor.
                    let step = self.input[self.cursor_position..]
                        .chars()
                        .next()
                        .map_or(1, char::len_utf8);
                    self.cursor_position += step;
                }
                TuiCommand::None
            }
            KeyCode::Home => {
                self.cursor_position = 0;
                TuiCommand::None
            }
            KeyCode::End => {
                self.cursor_position = self.input.len();
                TuiCommand::None
            }
            KeyCode::Esc => {
                self.input_mode = InputMode::Normal;
                TuiCommand::None
            }
            KeyCode::Up => {
                self.scroll_offset = self.scroll_offset.saturating_add(1);
                TuiCommand::None
            }
            KeyCode::Down => {
                self.scroll_offset = self.scroll_offset.saturating_sub(1);
                TuiCommand::None
            }
            _ => TuiCommand::None,
        }
    }
    /// Key handling for `Normal` (command/navigation) mode.
    fn handle_normal_key(&mut self, key: KeyEvent) -> TuiCommand {
        match key.code {
            KeyCode::Char('q') | KeyCode::Char('Q') => TuiCommand::Quit,
            KeyCode::Char('/') => {
                // Jump straight into editing with a slash pre-typed.
                self.input_mode = InputMode::Editing;
                self.input.push('/');
                self.cursor_position = 1;
                TuiCommand::None
            }
            KeyCode::Char('i') | KeyCode::Enter => {
                self.input_mode = InputMode::Editing;
                TuiCommand::None
            }
            KeyCode::Up => {
                self.scroll_offset = self.scroll_offset.saturating_add(1);
                TuiCommand::None
            }
            KeyCode::Down => {
                self.scroll_offset = self.scroll_offset.saturating_sub(1);
                TuiCommand::None
            }
            _ => TuiCommand::None,
        }
    }
    /// Key handling for the file-path prompt.
    fn handle_file_prompt_key(&mut self, key: KeyEvent) -> TuiCommand {
        match key.code {
            KeyCode::Enter => {
                if !self.file_path_input.is_empty() {
                    let path = PathBuf::from(self.file_path_input.drain(..).collect::<String>());
                    self.input_mode = InputMode::Editing;
                    return TuiCommand::SendFile(path);
                }
                self.input_mode = InputMode::Editing;
                TuiCommand::None
            }
            KeyCode::Char(c) => {
                self.file_path_input.push(c);
                TuiCommand::None
            }
            KeyCode::Backspace => {
                // `pop` removes a full char, so this is multibyte-safe.
                self.file_path_input.pop();
                TuiCommand::None
            }
            KeyCode::Esc => {
                self.file_path_input.clear();
                self.input_mode = InputMode::Editing;
                TuiCommand::None
            }
            _ => TuiCommand::None,
        }
    }
    /// Key handling for the mic/speaker picker overlays.
    fn handle_device_select_key(&mut self, key: KeyEvent) -> TuiCommand {
        match key.code {
            KeyCode::Up | KeyCode::Char('k') => {
                if self.device_selected_index > 0 {
                    self.device_selected_index -= 1;
                }
                TuiCommand::None
            }
            KeyCode::Down | KeyCode::Char('j') => {
                if self.device_selected_index + 1 < self.audio_devices.len() {
                    self.device_selected_index += 1;
                }
                TuiCommand::None
            }
            KeyCode::Enter => {
                if let Some(dev) = self.audio_devices.get(self.device_selected_index) {
                    let node_name = dev.node_name.clone();
                    // Remember which picker was open before leaving it.
                    let mode = self.input_mode.clone();
                    self.input_mode = InputMode::Editing;
                    self.audio_devices.clear();
                    if mode == InputMode::MicSelect {
                        return TuiCommand::SelectMic(node_name);
                    } else {
                        return TuiCommand::SelectSpeaker(node_name);
                    }
                }
                self.input_mode = InputMode::Editing;
                TuiCommand::None
            }
            KeyCode::Esc | KeyCode::Char('q') => {
                self.input_mode = InputMode::Editing;
                self.audio_devices.clear();
                TuiCommand::None
            }
            _ => TuiCommand::None,
        }
    }
}
/// Render the full application UI.
///
/// Layout: a chat panel (75%) beside a peers/transfers side column (25%),
/// with the input bar and a one-line status bar along the bottom. The
/// device-selection popup is drawn on top when a picker is active.
pub fn render(
    frame: &mut Frame,
    app: &App,
    chat: &ChatState,
    file_mgr: &FileTransferManager,
    media: &MediaState,
    peers: &[PeerInfo],
    our_name: &str,
    our_id_short: &str,
    connected: bool,
) {
    let screen = frame.area();
    // Vertical split: content on top, then the input bar, then the status bar.
    let rows = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Min(5),
            Constraint::Length(3),
            Constraint::Length(1),
        ])
        .split(screen);
    // Horizontal split of the content row: chat | side column.
    let columns = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(75), Constraint::Percentage(25)])
        .split(rows[0]);
    // Side column: peer list above, file transfers below.
    let side = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
        .split(columns[1]);
    chat_panel::render(frame, columns[0], chat, app);
    peer_panel::render(frame, side[0], peers, app);
    file_panel::render(frame, side[1], file_mgr, app);
    input::render(frame, rows[1], app);
    status_bar::render(
        frame,
        rows[2],
        media,
        our_name,
        our_id_short,
        connected,
        &app.input_mode,
    );
    // The device picker overlays everything else while active.
    if matches!(app.input_mode, InputMode::MicSelect | InputMode::SpeakerSelect) {
        render_device_overlay(frame, screen, app);
    }
}
/// Render the device selection overlay (centered popup).
fn render_device_overlay(frame: &mut Frame, area: Rect, app: &App) {
    // Size the popup to its contents, clamped to the terminal with a margin.
    let width = 60u16.min(area.width.saturating_sub(4));
    let height = (app.audio_devices.len() as u16 + 4).min(area.height.saturating_sub(4));
    let popup = Rect::new(
        (area.width.saturating_sub(width)) / 2,
        (area.height.saturating_sub(height)) / 2,
        width,
        height,
    );
    // Erase whatever is underneath so the popup is opaque.
    frame.render_widget(Clear, popup);
    let title = match app.input_mode {
        InputMode::MicSelect => " 🎤 Select Microphone (↑↓ Enter Esc) ",
        _ => " 🔊 Select Speaker (↑↓ Enter Esc) ",
    };
    let block = Block::default()
        .title(title)
        .borders(Borders::ALL)
        .border_style(Style::default().fg(app.theme.border));
    let inner = block.inner(popup);
    frame.render_widget(block, popup);
    let rows: Vec<ListItem> = app
        .audio_devices
        .iter()
        .enumerate()
        .map(|(i, dev)| {
            let selected = i == app.device_selected_index;
            // Highlight the selected row with marker + bold accent color.
            let marker = if selected { "" } else { " " };
            let style = if selected {
                Style::default()
                    .fg(app.theme.self_name)
                    .add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(app.theme.text)
            };
            ListItem::new(Line::from(Span::styled(
                format!("{}{}", marker, dev.description),
                style,
            )))
        })
        .collect();
    frame.render_widget(List::new(rows), inner);
}

83
src/tui/peer_panel.rs Normal file
View File

@@ -0,0 +1,83 @@
//! Peer list panel widget.
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use ratatui::Frame;
use crate::net::PeerInfo;
use crate::tui::App;
/// Render the peer list panel: one line per peer with its name, a short
/// id, a "(you)" suffix for ourselves, and capability icons.
pub fn render(frame: &mut Frame, area: Rect, peers: &[PeerInfo], app: &App) {
    let block = Block::default()
        .title(format!(" 👥 Peers ({}) ", peers.len()))
        .borders(Borders::ALL)
        .border_style(Style::default().fg(app.theme.border));
    let inner = block.inner(area);
    frame.render_widget(block, area);
    if peers.is_empty() {
        // Use the theme's dim color for the placeholder, consistent with the
        // chat and transfer panels (was hardcoded Color::DarkGray).
        let empty = ratatui::widgets::Paragraph::new("No peers connected")
            .style(Style::default().fg(app.theme.time));
        frame.render_widget(empty, inner);
        return;
    }
    let items: Vec<ListItem> = peers
        .iter()
        .map(|peer| {
            let name = peer.name.as_deref().unwrap_or("unknown");
            // First 8 characters of the peer id for compact display.
            let id_short = peer.id.to_string().chars().take(8).collect::<String>();
            let caps = peer
                .capabilities
                .as_ref()
                .map(|c| {
                    let mut s = String::new();
                    if c.audio {
                        s.push_str("🎤");
                    }
                    if c.camera {
                        s.push_str("📷");
                    }
                    if c.screen {
                        s.push_str("🖥");
                    }
                    s
                })
                .unwrap_or_default();
            let (marker, marker_color, name_suffix) = if peer.is_self {
                ("", app.theme.self_name, " (you)")
            } else {
                ("", app.theme.peer_name, "")
            };
            let line = Line::from(vec![
                Span::styled(marker, Style::default().fg(marker_color)),
                Span::styled(
                    name.to_string(),
                    Style::default()
                        .fg(app.theme.text)
                        .add_modifier(Modifier::BOLD),
                ),
                Span::styled(
                    format!("{} ({})", name_suffix, id_short),
                    Style::default().fg(app.theme.time),
                ),
                Span::raw(format!(" {}", caps)),
            ]);
            ListItem::new(line)
        })
        .collect();
    frame.render_widget(List::new(items), inner);
}

54
src/tui/status_bar.rs Normal file
View File

@@ -0,0 +1,54 @@
//! Status bar widget.
use ratatui::layout::Rect;
use ratatui::style::{Color, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::Paragraph;
use ratatui::Frame;
use crate::media::MediaState;
use super::InputMode;
/// Render the one-line status bar: input-mode badge, connection state,
/// our identity, and the media status line.
pub fn render(
    frame: &mut Frame,
    area: Rect,
    media: &MediaState,
    our_name: &str,
    our_id_short: &str,
    connected: bool,
    input_mode: &InputMode,
) {
    // Vim-style mode badge with a mode-specific background color.
    let mode_span = match input_mode {
        InputMode::Normal => Span::styled(" NORMAL ", Style::default().fg(Color::Black).bg(Color::Blue)),
        InputMode::Editing => Span::styled(" INSERT ", Style::default().fg(Color::Black).bg(Color::Green)),
        InputMode::FilePrompt => Span::styled(" FILE ", Style::default().fg(Color::Black).bg(Color::Yellow)),
        InputMode::MicSelect => Span::styled(" MIC ", Style::default().fg(Color::Black).bg(Color::Magenta)),
        InputMode::SpeakerSelect => Span::styled(" SPKR ", Style::default().fg(Color::Black).bg(Color::Cyan)),
    };
    let conn_status = if connected {
        Span::styled("● Connected", Style::default().fg(Color::Green))
    } else {
        Span::styled("○ Waiting for peers", Style::default().fg(Color::Yellow))
    };
    let identity = Span::styled(
        format!("{} ({})", our_name, our_id_short),
        Style::default().fg(Color::Cyan),
    );
    let media_status = Span::styled(
        format!("{}", media.status_line()),
        Style::default().fg(Color::DarkGray),
    );
    let bar = Line::from(vec![
        Span::raw(" "),
        mode_span,
        Span::raw(" "),
        conn_status,
        identity,
        media_status,
    ]);
    frame.render_widget(
        Paragraph::new(bar).style(Style::default().bg(Color::Rgb(30, 30, 40))),
        area,
    );
}

166
src/web/mod.rs Normal file
View File

@@ -0,0 +1,166 @@
use axum::{
extract::{
ws::{Message, WebSocket, WebSocketUpgrade},
State,
},
http::{header, StatusCode, Uri},
body::Bytes,
response::IntoResponse,
routing::get,
Router,
};
use rust_embed::RustEmbed;
use std::net::SocketAddr;
use crate::media::WebMediaEvent;
use crate::protocol::MediaKind;
use tokio::sync::broadcast;
use futures::{SinkExt, StreamExt};
/// Shared state handed to every websocket connection.
#[derive(Clone)]
struct AppState {
    // Remote peer media fanned out to connected browsers.
    tx: broadcast::Sender<WebMediaEvent>,
    // Local capture received from the browser: mic PCM and MJPEG frames,
    // fanned out to the rest of the application.
    mic_tx: broadcast::Sender<Vec<f32>>,
    cam_tx: broadcast::Sender<Vec<u8>>,
    screen_tx: broadcast::Sender<Vec<u8>>,
}
/// Static web assets embedded into the binary from the `web/` directory.
#[derive(RustEmbed)]
#[folder = "web/"]
struct Assets;
pub async fn start_web_server(
tx: broadcast::Sender<WebMediaEvent>,
mic_tx: broadcast::Sender<Vec<f32>>,
cam_tx: broadcast::Sender<Vec<u8>>,
screen_tx: broadcast::Sender<Vec<u8>>,
) {
let state = AppState { tx, mic_tx, cam_tx, screen_tx };
let app = Router::new()
.route("/ws", get(ws_handler))
.fallback(static_handler)
.with_state(state);
let mut port = 6969;
let listener = loop {
let addr = SocketAddr::from(([127, 0, 0, 1], port));
match tokio::net::TcpListener::bind(addr).await {
Ok(l) => {
tracing::info!("Web interface listening on http://{}", addr);
break l;
}
Err(_) => {
port += 1;
if port > 7000 {
tracing::error!("Failed to bind web interface to any port 6969-7000");
return;
}
}
}
};
axum::serve(listener, app).await.unwrap();
}
/// Upgrade an HTTP request on `/ws` to a websocket session served by
/// `handle_socket`.
async fn ws_handler(
    ws: WebSocketUpgrade,
    State(state): State<AppState>,
) -> impl IntoResponse {
    ws.on_upgrade(move |socket| handle_socket(socket, state))
}
/// Serve one browser websocket connection.
///
/// Outbound (spawned task): subscribes to `state.tx` and forwards peer
/// media to the browser as binary frames laid out as
/// `[type: u8][id_len: u8][peer_id bytes][payload]`, with type
/// 0 = audio (f32 PCM), 1 = camera (MJPEG), 2 = screen (MJPEG).
///
/// Inbound (this task): receives local capture frames from the browser as
/// `[type: u8][payload]`, with type 3 = mic (f32 PCM), 4 = camera MJPEG,
/// 5 = screen MJPEG, and fans them out on the matching broadcast channel.
async fn handle_socket(socket: WebSocket, state: AppState) {
    let (mut sender, mut receiver) = socket.split();
    let mut rx = state.tx.subscribe();
    tokio::spawn(async move {
        while let Ok(event) = rx.recv().await {
            let msg = match event {
                WebMediaEvent::Audio { peer_id, data: samples } => {
                    // 1 byte header (0) + 1 byte ID len + ID bytes + f32 bytes
                    // NOTE(review): `to_ne_bytes` sends native-endian floats;
                    // assumes browser JS and host agree on endianness
                    // (true on little-endian x86/ARM) — confirm.
                    let id_bytes = peer_id.as_bytes();
                    let id_len = id_bytes.len() as u8;
                    let mut payload = Vec::with_capacity(1 + 1 + id_bytes.len() + samples.len() * 4);
                    payload.push(0u8);
                    payload.push(id_len);
                    payload.extend_from_slice(id_bytes);
                    for s in samples {
                        payload.extend_from_slice(&s.to_ne_bytes());
                    }
                    Message::Binary(Bytes::from(payload))
                }
                WebMediaEvent::Video { peer_id, kind, data } => {
                    // 1 byte header (1=Camera, 2=Screen) + 1 byte ID len + ID bytes + MJPEG data
                    let header = match kind {
                        MediaKind::Camera => 1u8,
                        MediaKind::Screen => 2u8,
                        _ => 1u8,
                    };
                    let id_bytes = peer_id.as_bytes();
                    let id_len = id_bytes.len() as u8;
                    let mut payload = Vec::with_capacity(1 + 1 + id_bytes.len() + data.len());
                    payload.push(header);
                    payload.push(id_len);
                    payload.extend_from_slice(id_bytes);
                    payload.extend_from_slice(&data);
                    Message::Binary(Bytes::from(payload))
                }
            };
            // Send failure means the browser went away: stop forwarding.
            if sender.send(msg).await.is_err() {
                break;
            }
        }
    });
    while let Some(msg) = receiver.next().await {
        match msg {
            Ok(Message::Binary(data)) => {
                if data.is_empty() { continue; }
                let header = data[0];
                let payload = &data[1..];
                match header {
                    3 => { // Mic (f32 PCM)
                        // integrity check
                        if payload.len() % 4 == 0 {
                            let samples: Vec<f32> = payload
                                .chunks_exact(4)
                                .map(|b| f32::from_ne_bytes([b[0], b[1], b[2], b[3]]))
                                .collect();
                            // tracing::debug!("Received mic samples: {}", samples.len());
                            let _ = state.mic_tx.send(samples);
                        }
                    }
                    4 => { // Camera (MJPEG)
                        tracing::debug!("Received camera frame: {} bytes", payload.len());
                        let _ = state.cam_tx.send(payload.to_vec());
                    }
                    5 => { // Screen (MJPEG)
                        tracing::debug!("Received screen frame: {} bytes", payload.len());
                        let _ = state.screen_tx.send(payload.to_vec());
                    }
                    _ => {
                        tracing::warn!("Unknown WS header: {}", header);
                    }
                }
            }
            Ok(Message::Close(_)) => break,
            Err(_) => break,
            _ => {}
        }
    }
}
/// Serves files embedded via `rust-embed`; the root path maps to `index.html`.
/// Unknown paths yield a plain-text 404.
async fn static_handler(uri: Uri) -> impl IntoResponse {
    let mut path = uri.path().trim_start_matches('/');
    if path.is_empty() {
        path = "index.html";
    }
    if let Some(content) = Assets::get(path) {
        // Guess the Content-Type from the file extension.
        let mime = mime_guess::from_path(path).first_or_octet_stream();
        ([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
    } else {
        (StatusCode::NOT_FOUND, "404 Not Found").into_response()
    }
}

180
tests/network_tests.rs Normal file
View File

@@ -0,0 +1,180 @@
use anyhow::{Context, Result};
use p2p_chat::net::{NetEvent, NetworkManager};
use p2p_chat::protocol::{ChatMessage, GossipMessage, MediaKind};
use tokio::time::{timeout, Duration};
/// Upper bound for waiting on any single network event in these tests.
const TEST_TIMEOUT: Duration = Duration::from_secs(10);

/// Spawns a fresh node on `topic`, returning the manager plus the event
/// channel pair the tests use to observe `NetEvent`s.
async fn spawn_node(topic: [u8; 32]) -> Result<(NetworkManager, tokio::sync::mpsc::Sender<NetEvent>, tokio::sync::mpsc::Receiver<NetEvent>)> {
    NetworkManager::new(topic).await
}
/// End-to-end check: two nodes join the same gossip topic, each observes the
/// other come up (`PeerUp`), and a chat message broadcast by A arrives at B
/// intact via `GossipReceived`.
#[tokio::test]
async fn test_p2p_connection_and_gossip() -> Result<()> {
    // 0. Setup
    let topic = [0u8; 32]; // Consistent topic for this test
    let (mut node_a, tx_a, mut rx_a) = spawn_node(topic).await?;
    let (mut node_b, tx_b, mut rx_b) = spawn_node(topic).await?;
    let addr_a = node_a.endpoint.addr();
    let id_a = addr_a.id;
    let id_b = node_b.endpoint.id();
    println!("Node A: {}", id_a);
    println!("Node B: {}", id_b);
    // 1. Join Gossip
    // Node A starts alone
    node_a.join_gossip(vec![], tx_a.clone()).await?;
    // Node B joins, bootstrapping from Node A.
    // We force a connection first so B knows A's address.
    node_b.endpoint.connect(addr_a, iroh_gossip::ALPN).await?;
    node_b.join_gossip(vec![id_a], tx_b.clone()).await?;
    // 2. Wait for connection (PeerUp)
    // Both sides should see each other.
    // Outer `?` fails on timeout; inner `?` fails if the stream ends early.
    let _peer_up_a = timeout(TEST_TIMEOUT, async {
        while let Some(event) = rx_a.recv().await {
            if let NetEvent::PeerUp(peer_id) = event {
                if peer_id == id_b { return Ok(()); }
            }
        }
        anyhow::bail!("Stream ended without PeerUp");
    }).await??;
    let _peer_up_b = timeout(TEST_TIMEOUT, async {
        while let Some(event) = rx_b.recv().await {
            if let NetEvent::PeerUp(peer_id) = event {
                if peer_id == id_a { return Ok(()); }
            }
        }
        anyhow::bail!("Stream ended without PeerUp");
    }).await??;
    // 3. Test Gossip Broadcast (Chat Message)
    let chat_msg = GossipMessage::Chat(ChatMessage {
        sender_name: "Node A".into(),
        timestamp: 12345,
        text: "Hello Node B!".into(),
    });
    node_a.broadcast(&chat_msg).await?;
    // Node B should receive it, unmodified and attributed to A.
    timeout(TEST_TIMEOUT, async {
        while let Some(event) = rx_b.recv().await {
            if let NetEvent::GossipReceived { from, message } = event {
                assert_eq!(from, id_a);
                if let GossipMessage::Chat(msg) = message {
                    assert_eq!(msg.text, "Hello Node B!");
                    assert_eq!(msg.sender_name, "Node A");
                    return Ok(());
                }
            }
        }
        anyhow::bail!("Stream ended without GossipReceived");
    }).await??;
    Ok(())
}
/// Opens a direct stream from A to B and verifies B surfaces it as an
/// `IncomingFileStream` event carrying exactly the bytes A wrote.
#[tokio::test]
async fn test_direct_file_transfer_stream() -> Result<()> {
    // 0. Setup and Connect
    let topic = [1u8; 32];
    let (mut node_a, tx_a, _rx_a) = spawn_node(topic).await?;
    let (mut node_b, tx_b, mut rx_b) = spawn_node(topic).await?;
    let addr_a = node_a.endpoint.addr();
    let id_a = addr_a.id;
    let id_b = node_b.endpoint.id();
    node_a.join_gossip(vec![], tx_a).await?;
    node_b.endpoint.connect(addr_a, iroh_gossip::ALPN).await?;
    node_b.join_gossip(vec![id_a], tx_b).await?;
    // 1. Open direct stream from A to B
    // node_b needs to accept the stream. In the actual app, this is handled by spawn_acceptor loop
    // sending NetEvent::IncomingFileStream.
    // A opens stream
    let (mut send_stream, mut _recv_stream) = node_a.open_file_stream(id_b).await?;
    // Send some data
    let test_data = b"Hello direct stream";
    send_stream.write_all(test_data).await?;
    // Half-close so B sees EOF right after the payload.
    send_stream.finish()?;
    // 2. B should receive IncomingFileStream event
    let (mut b_recv_stream, from) = timeout(TEST_TIMEOUT, async {
        while let Some(event) = rx_b.recv().await {
            if let NetEvent::IncomingFileStream { from, recv, .. } = event {
                return Ok((recv, from));
            }
        }
        anyhow::bail!("Stream ended without IncomingFileStream");
    }).await??;
    assert_eq!(from, id_a);
    // 3. Read data on B side and compare byte-for-byte.
    let mut buf = vec![0u8; test_data.len()];
    b_recv_stream.read_exact(&mut buf).await?;
    assert_eq!(buf, test_data);
    Ok(())
}
/// Ensures each media kind (voice/camera/screen) travels on its own stream
/// and surfaces at the receiver as `IncomingMediaStream` with the matching
/// `MediaKind`.
#[tokio::test]
async fn test_media_stream_separation() -> Result<()> {
    // 0. Setup and Connect
    let topic = [2u8; 32];
    let (mut node_a, tx_a, _rx_a) = spawn_node(topic).await?;
    let (mut node_b, tx_b, mut rx_b) = spawn_node(topic).await?;
    let addr_a = node_a.endpoint.addr();
    let id_a = addr_a.id;
    let id_b = node_b.endpoint.id();
    node_a.join_gossip(vec![], tx_a).await?;
    node_b.endpoint.connect(addr_a, iroh_gossip::ALPN).await?;
    node_b.join_gossip(vec![id_a], tx_b).await?;
    // Helper: open a media stream of `kind` from sender to `peer_id` and
    // assert the receiver reports the same kind.
    async fn verify_stream(
        node_sender: &NetworkManager,
        peer_id: p2p_chat::net::EndpointId,
        rx_receiver: &mut tokio::sync::mpsc::Receiver<NetEvent>,
        kind: MediaKind,
    ) -> Result<()> {
        // Sender opens stream; finishing immediately is enough to trigger
        // the acceptor on the far side.
        let (mut send, _recv) = node_sender.open_media_stream(peer_id, kind).await?;
        send.finish()?;
        // Receiver should get IncomingMediaStream with correct kind
        timeout(TEST_TIMEOUT, async {
            while let Some(event) = rx_receiver.recv().await {
                if let NetEvent::IncomingMediaStream { from: _, kind: k, .. } = event {
                    assert_eq!(k, kind);
                    return Ok(());
                }
            }
            anyhow::bail!("Stream ended without IncomingMediaStream for {:?}", kind);
        }).await??;
        Ok(())
    }
    // 1. Verify Voice Stream
    verify_stream(&node_a, id_b, &mut rx_b, MediaKind::Voice).await.context("Voice stream failed")?;
    // 2. Verify Camera Stream
    verify_stream(&node_a, id_b, &mut rx_b, MediaKind::Camera).await.context("Camera stream failed")?;
    // 3. Verify Screen Stream
    verify_stream(&node_a, id_b, &mut rx_b, MediaKind::Screen).await.context("Screen stream failed")?;
    Ok(())
}

627
tests/tests.rs Normal file
View File

@@ -0,0 +1,627 @@
//! Integration tests for p2p-chat
//!
//! Covers:
//! - Protocol message serialization roundtrips
//! - Config defaults, TOML parsing, and color parsing
//! - TUI App command dispatch & key handling
//! - ChatState history management
// ============================================================================
// Protocol tests
// ============================================================================
mod protocol_tests {
use p2p_chat::protocol::*;
#[test]
fn chat_message_roundtrip() {
let msg = GossipMessage::Chat(ChatMessage {
sender_name: "alice".into(),
timestamp: 1234567890,
text: "Hello, world!".into(),
});
let bytes = postcard::to_allocvec(&msg).unwrap();
let decoded: GossipMessage = postcard::from_bytes(&bytes).unwrap();
match decoded {
GossipMessage::Chat(m) => {
assert_eq!(m.sender_name, "alice");
assert_eq!(m.timestamp, 1234567890);
assert_eq!(m.text, "Hello, world!");
}
_ => panic!("Expected Chat variant"),
}
}
#[test]
fn disconnect_message_roundtrip() {
let msg = GossipMessage::Disconnect {
sender_name: "bob".into(),
};
let bytes = postcard::to_allocvec(&msg).unwrap();
let decoded: GossipMessage = postcard::from_bytes(&bytes).unwrap();
match decoded {
GossipMessage::Disconnect { sender_name } => {
assert_eq!(sender_name, "bob");
}
_ => panic!("Expected Disconnect variant"),
}
}
#[test]
fn name_change_roundtrip() {
let msg = GossipMessage::NameChange(NameChange {
old_name: "anon".into(),
new_name: "alice".into(),
});
let bytes = postcard::to_allocvec(&msg).unwrap();
let decoded: GossipMessage = postcard::from_bytes(&bytes).unwrap();
match decoded {
GossipMessage::NameChange(nc) => {
assert_eq!(nc.old_name, "anon");
assert_eq!(nc.new_name, "alice");
}
_ => panic!("Expected NameChange variant"),
}
}
#[test]
fn peer_announce_roundtrip() {
let msg = GossipMessage::PeerAnnounce(PeerAnnounce {
sender_name: "charlie".into(),
});
let bytes = postcard::to_allocvec(&msg).unwrap();
let decoded: GossipMessage = postcard::from_bytes(&bytes).unwrap();
match decoded {
GossipMessage::PeerAnnounce(pa) => {
assert_eq!(pa.sender_name, "charlie");
}
_ => panic!("Expected PeerAnnounce variant"),
}
}
#[test]
fn capabilities_default() {
let caps = CapabilitiesMessage::default();
assert!(caps.chat);
assert!(caps.files);
assert!(!caps.audio);
assert!(!caps.camera);
assert!(!caps.screen);
assert_eq!(caps.sender_name, "");
}
#[test]
fn file_offer_broadcast_roundtrip() {
let fid = new_file_id();
let msg = GossipMessage::FileOfferBroadcast(FileOfferBroadcast {
sender_name: "dave".into(),
file_id: fid,
file_name: "test.txt".into(),
file_size: 42,
});
let bytes = postcard::to_allocvec(&msg).unwrap();
let decoded: GossipMessage = postcard::from_bytes(&bytes).unwrap();
match decoded {
GossipMessage::FileOfferBroadcast(f) => {
assert_eq!(f.sender_name, "dave");
assert_eq!(f.file_id, fid);
assert_eq!(f.file_name, "test.txt");
assert_eq!(f.file_size, 42);
}
_ => panic!("Expected FileOfferBroadcast variant"),
}
}
#[test]
fn encode_framed_produces_valid_length_prefix() {
let msg = ChatMessage {
sender_name: "test".into(),
timestamp: 0,
text: "hi".into(),
};
let framed = encode_framed(&msg).unwrap();
assert!(framed.len() > 4);
let len = u32::from_be_bytes([framed[0], framed[1], framed[2], framed[3]]) as usize;
assert_eq!(len, framed.len() - 4);
// Verify the payload can be deserialized
let decoded: ChatMessage = postcard::from_bytes(&framed[4..]).unwrap();
assert_eq!(decoded.sender_name, "test");
}
#[test]
fn file_id_is_unique() {
let id1 = new_file_id();
let id2 = new_file_id();
assert_ne!(id1, id2);
}
#[test]
fn media_kind_serialization() {
for kind in [MediaKind::Voice, MediaKind::Camera, MediaKind::Screen] {
let bytes = postcard::to_allocvec(&kind).unwrap();
let decoded: MediaKind = postcard::from_bytes(&bytes).unwrap();
assert_eq!(decoded, kind);
}
}
#[test]
fn media_stream_message_roundtrip() {
let msgs = vec![
MediaStreamMessage::AudioStart {
sample_rate: 48000,
channels: 1,
frame_size_ms: 20,
},
MediaStreamMessage::AudioData {
sequence: 42,
opus_data: vec![0xDE, 0xAD],
},
MediaStreamMessage::AudioStop,
MediaStreamMessage::VideoStart {
kind: MediaKind::Screen,
width: 1920,
height: 1080,
fps: 30,
},
MediaStreamMessage::VideoFrame {
sequence: 1,
timestamp_ms: 1000,
data: vec![0x01, 0x02, 0x03],
},
MediaStreamMessage::VideoStop {
kind: MediaKind::Camera,
},
];
for msg in msgs {
let bytes = postcard::to_allocvec(&msg).unwrap();
let _decoded: MediaStreamMessage = postcard::from_bytes(&bytes).unwrap();
}
}
}
// ============================================================================
// Config tests
// ============================================================================
mod config_tests {
    use p2p_chat::config::*;
    use ratatui::style::Color;

    #[test]
    fn default_config_values() {
        let cfg = AppConfig::default();
        assert_eq!(cfg.media.screen_resolution, "1280x720");
        assert!(cfg.media.mic_name.is_none());
        assert!(cfg.network.topic.is_none());
    }

    #[test]
    fn config_toml_roundtrip() {
        // Serializing the default config and parsing it back must be lossless
        // for the fields checked here.
        let original = AppConfig::default();
        let serialized = toml::to_string_pretty(&original).unwrap();
        let reparsed: AppConfig = toml::from_str(&serialized).unwrap();
        assert_eq!(reparsed.media.screen_resolution, original.media.screen_resolution);
        assert_eq!(reparsed.media.mic_name, original.media.mic_name);
        assert_eq!(reparsed.network.topic, original.network.topic);
    }

    #[test]
    fn config_partial_toml_uses_defaults() {
        // Only [media] is present; every other section must fall back to defaults.
        let toml_str = r#"
[media]
screen_resolution = "1920x1080"
"#;
        let cfg: AppConfig = toml::from_str(toml_str).unwrap();
        assert_eq!(cfg.media.screen_resolution, "1920x1080");
        assert!(cfg.network.topic.is_none());
    }

    #[test]
    fn parse_color_named_colors() {
        let cases = [
            ("red", Color::Red),
            ("green", Color::Green),
            ("blue", Color::Blue),
            ("cyan", Color::Cyan),
            ("magenta", Color::Magenta),
            ("yellow", Color::Yellow),
            ("white", Color::White),
            ("black", Color::Black),
            ("gray", Color::Gray),
            ("dark_gray", Color::DarkGray),
            ("darkgray", Color::DarkGray),
        ];
        for (name, expected) in cases {
            assert_eq!(parse_color(name), expected);
        }
    }

    #[test]
    fn parse_color_case_insensitive() {
        let cases = [
            ("RED", Color::Red),
            ("Green", Color::Green),
            ("BLUE", Color::Blue),
        ];
        for (name, expected) in cases {
            assert_eq!(parse_color(name), expected);
        }
    }

    #[test]
    fn parse_color_hex_6digit() {
        let cases = [
            ("#ff0000", Color::Rgb(255, 0, 0)),
            ("#00ff00", Color::Rgb(0, 255, 0)),
            ("#0000ff", Color::Rgb(0, 0, 255)),
            ("#ffffff", Color::Rgb(255, 255, 255)),
            ("#000000", Color::Rgb(0, 0, 0)),
        ];
        for (hex, expected) in cases {
            assert_eq!(parse_color(hex), expected);
        }
    }

    #[test]
    fn parse_color_hex_3digit() {
        // Shorthand hex: each digit expands to a doubled byte (#f00 -> #ff0000).
        let cases = [
            ("#f00", Color::Rgb(255, 0, 0)),
            ("#0f0", Color::Rgb(0, 255, 0)),
            ("#00f", Color::Rgb(0, 0, 255)),
            ("#fff", Color::Rgb(255, 255, 255)),
        ];
        for (hex, expected) in cases {
            assert_eq!(parse_color(hex), expected);
        }
    }

    #[test]
    fn parse_color_unknown_falls_back_to_white() {
        for bad in ["nonexistent", ""] {
            assert_eq!(parse_color(bad), Color::White);
        }
    }

    #[test]
    fn theme_from_ui_config() {
        // The default UI config must map onto the expected theme palette.
        let theme: Theme = UiConfig::default().into();
        assert_eq!(theme.border, Color::Cyan);
        assert_eq!(theme.text, Color::White);
        assert_eq!(theme.self_name, Color::Green);
        assert_eq!(theme.peer_name, Color::Magenta);
        assert_eq!(theme.system_msg, Color::Yellow);
        assert_eq!(theme.time, Color::DarkGray);
    }

    #[test]
    fn ui_config_defaults() {
        let ui = UiConfig::default();
        assert_eq!(ui.border, "cyan");
        assert_eq!(ui.text, "white");
        assert_eq!(ui.self_name, "green");
        assert_eq!(ui.peer_name, "magenta");
        assert_eq!(ui.system_msg, "yellow");
        assert_eq!(ui.time, "dark_gray");
    }
}
// ============================================================================
// TUI key handling tests
// ============================================================================
mod tui_tests {
    use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
    use p2p_chat::config::{Theme, UiConfig};
    use p2p_chat::tui::{App, InputMode, TuiCommand};

    /// Fresh `App` built from the default theme.
    fn make_app() -> App {
        let theme: Theme = UiConfig::default().into();
        App::new(theme)
    }

    /// Plain key press (no modifiers).
    fn key(code: KeyCode) -> KeyEvent {
        KeyEvent::new(code, KeyModifiers::NONE)
    }

    /// Key press with CONTROL held.
    fn ctrl_key(code: KeyCode) -> KeyEvent {
        KeyEvent::new(code, KeyModifiers::CONTROL)
    }

    /// Types every character of `text` into the app as plain key presses.
    fn feed(app: &mut App, text: &str) {
        for c in text.chars() {
            app.handle_key(key(KeyCode::Char(c)));
        }
    }

    /// Presses Enter and returns the resulting command.
    fn submit(app: &mut App) -> TuiCommand {
        app.handle_key(key(KeyCode::Enter))
    }

    #[test]
    fn initial_state() {
        let app = make_app();
        assert_eq!(app.input_mode, InputMode::Editing);
        assert_eq!(app.input, "");
        assert_eq!(app.cursor_position, 0);
        assert_eq!(app.scroll_offset, 0);
    }

    #[test]
    fn esc_switches_to_normal_mode() {
        let mut app = make_app();
        assert_eq!(app.input_mode, InputMode::Editing);
        assert!(matches!(app.handle_key(key(KeyCode::Esc)), TuiCommand::None));
        assert_eq!(app.input_mode, InputMode::Normal);
    }

    #[test]
    fn i_switches_to_editing_mode() {
        let mut app = make_app();
        app.input_mode = InputMode::Normal;
        assert!(matches!(app.handle_key(key(KeyCode::Char('i'))), TuiCommand::None));
        assert_eq!(app.input_mode, InputMode::Editing);
    }

    #[test]
    fn enter_in_normal_switches_to_editing() {
        let mut app = make_app();
        app.input_mode = InputMode::Normal;
        assert!(matches!(app.handle_key(key(KeyCode::Enter)), TuiCommand::None));
        assert_eq!(app.input_mode, InputMode::Editing);
    }

    #[test]
    fn slash_in_normal_prefills_slash() {
        // '/' in normal mode enters editing with the slash already typed.
        let mut app = make_app();
        app.input_mode = InputMode::Normal;
        assert!(matches!(app.handle_key(key(KeyCode::Char('/'))), TuiCommand::None));
        assert_eq!(app.input_mode, InputMode::Editing);
        assert_eq!(app.input, "/");
        assert_eq!(app.cursor_position, 1);
    }

    #[test]
    fn q_in_normal_quits() {
        let mut app = make_app();
        app.input_mode = InputMode::Normal;
        assert!(matches!(app.handle_key(key(KeyCode::Char('q'))), TuiCommand::Quit));
    }

    #[test]
    fn typing_characters_in_editing() {
        let mut app = make_app();
        feed(&mut app, "hi");
        assert_eq!(app.input, "hi");
        assert_eq!(app.cursor_position, 2);
    }

    #[test]
    fn backspace_deletes_character() {
        let mut app = make_app();
        feed(&mut app, "abc");
        assert_eq!(app.input, "abc");
        app.handle_key(key(KeyCode::Backspace));
        assert_eq!(app.input, "ab");
        assert_eq!(app.cursor_position, 2);
    }

    #[test]
    fn enter_sends_message() {
        let mut app = make_app();
        feed(&mut app, "hi");
        let cmd = submit(&mut app);
        assert!(matches!(cmd, TuiCommand::SendMessage(ref s) if s == "hi"));
        // The input line is cleared after sending.
        assert_eq!(app.input, "");
        assert_eq!(app.cursor_position, 0);
    }

    #[test]
    fn enter_on_empty_input_does_nothing() {
        let mut app = make_app();
        assert!(matches!(submit(&mut app), TuiCommand::None));
    }

    #[test]
    fn quit_command() {
        let mut app = make_app();
        feed(&mut app, "/quit");
        assert!(matches!(submit(&mut app), TuiCommand::Quit));
    }

    #[test]
    fn help_command_is_system_message() {
        let mut app = make_app();
        feed(&mut app, "/help");
        assert!(matches!(submit(&mut app), TuiCommand::SystemMessage(_)));
    }

    #[test]
    fn nick_command_without_name_is_system_message() {
        let mut app = make_app();
        feed(&mut app, "/nick");
        assert!(matches!(submit(&mut app), TuiCommand::SystemMessage(_)));
    }

    #[test]
    fn nick_command_with_name() {
        let mut app = make_app();
        feed(&mut app, "/nick alice");
        assert!(matches!(submit(&mut app), TuiCommand::ChangeNick(ref s) if s == "alice"));
    }

    #[test]
    fn connect_command_without_id_is_system_message() {
        let mut app = make_app();
        feed(&mut app, "/connect");
        assert!(matches!(submit(&mut app), TuiCommand::SystemMessage(_)));
    }

    #[test]
    fn connect_command_with_id() {
        let mut app = make_app();
        feed(&mut app, "/connect abc123");
        assert!(matches!(submit(&mut app), TuiCommand::Connect(ref s) if s == "abc123"));
    }

    #[test]
    fn voice_command() {
        let mut app = make_app();
        feed(&mut app, "/voice");
        assert!(matches!(submit(&mut app), TuiCommand::ToggleVoice));
    }

    #[test]
    fn camera_command() {
        let mut app = make_app();
        feed(&mut app, "/camera");
        assert!(matches!(submit(&mut app), TuiCommand::ToggleCamera));
    }

    #[test]
    fn screen_command() {
        let mut app = make_app();
        feed(&mut app, "/screen");
        assert!(matches!(submit(&mut app), TuiCommand::ToggleScreen));
    }

    #[test]
    fn leave_command() {
        let mut app = make_app();
        feed(&mut app, "/leave");
        assert!(matches!(submit(&mut app), TuiCommand::Leave));
    }

    #[test]
    fn unknown_command_is_system_message() {
        let mut app = make_app();
        feed(&mut app, "/foobar");
        assert!(matches!(submit(&mut app), TuiCommand::SystemMessage(_)));
    }

    #[test]
    fn file_command_with_path() {
        let mut app = make_app();
        feed(&mut app, "/file /tmp/test.txt");
        match submit(&mut app) {
            TuiCommand::SendFile(path) => {
                assert_eq!(path.to_str().unwrap(), "/tmp/test.txt");
            }
            cmd => panic!("Expected SendFile, got {:?}", cmd),
        }
    }

    #[test]
    fn scroll_up_and_down_in_normal_mode() {
        let mut app = make_app();
        app.input_mode = InputMode::Normal;
        // Two steps up, then down past zero to verify clamping at 0.
        for expected in [1, 2] {
            app.handle_key(key(KeyCode::Up));
            assert_eq!(app.scroll_offset, expected);
        }
        for expected in [1, 0, 0] {
            app.handle_key(key(KeyCode::Down));
            assert_eq!(app.scroll_offset, expected);
        }
    }

    #[test]
    fn ctrl_c_not_handled_in_tui_layer() {
        // Ctrl+C is handled at the signal level (tokio::signal::ctrl_c), not in
        // TUI key handling. Crossterm delivers Char('c') with CONTROL modifier,
        // which the TUI treats like any other char input.
        let mut app = make_app();
        feed(&mut app, "hi");
        app.handle_key(ctrl_key(KeyCode::Char('c')));
        assert_eq!(app.input, "hic");
    }
}
// ============================================================================
// Chat state tests
// ============================================================================
mod chat_tests {
    use p2p_chat::chat::ChatState;
    use p2p_chat::protocol::ChatMessage;

    #[test]
    fn new_chat_state() {
        let state = ChatState::new("alice".into());
        assert_eq!(state.our_name, "alice");
        assert!(state.history.is_empty());
    }

    #[test]
    fn add_system_message() {
        let mut state = ChatState::new("alice".into());
        state.add_system_message("test message".into());
        assert_eq!(state.history.len(), 1);
        let entry = &state.history[0];
        assert!(entry.is_system);
        assert_eq!(entry.text, "test message");
        assert_eq!(entry.sender_name, "SYSTEM");
    }

    #[test]
    fn receive_message() {
        let mut state = ChatState::new("alice".into());
        state.receive_message(ChatMessage {
            sender_name: "bob".into(),
            timestamp: 1234567890,
            text: "hello".into(),
        });
        assert_eq!(state.history.len(), 1);
        let entry = &state.history[0];
        // An incoming message is neither ours nor a system notice.
        assert!(!entry.is_self);
        assert!(!entry.is_system);
        assert_eq!(entry.sender_name, "bob");
        assert_eq!(entry.text, "hello");
    }

    #[test]
    fn history_trimming() {
        let mut state = ChatState::new("alice".into());
        state.max_history = 5;
        for i in 0..10 {
            state.add_system_message(format!("msg {}", i));
        }
        // Only the newest five messages survive the trim.
        assert_eq!(state.history.len(), 5);
        assert_eq!(state.history[0].text, "msg 5");
        assert_eq!(state.history[4].text, "msg 9");
    }

    #[test]
    fn multiple_message_types() {
        let mut state = ChatState::new("alice".into());
        state.add_system_message("welcome".into());
        state.receive_message(ChatMessage {
            sender_name: "bob".into(),
            timestamp: 100,
            text: "hi alice".into(),
        });
        state.add_system_message("bob left".into());
        assert_eq!(state.history.len(), 3);
        assert!(state.history[0].is_system);
        assert!(!state.history[1].is_system);
        assert_eq!(state.history[1].sender_name, "bob");
        assert!(state.history[2].is_system);
    }
}

353
web/app.js Normal file
View File

@@ -0,0 +1,353 @@
// DOM handles for the control panel, remote-stream grid and local preview.
const toggleMicBtn = document.getElementById('toggle-mic');
const toggleCamBtn = document.getElementById('toggle-cam');
const toggleScreenBtn = document.getElementById('toggle-screen');
const statusEl = document.getElementById('status');
const remoteStreamsContainer = document.getElementById('remote-streams');
const localVideo = document.getElementById('local-video');
// --- Local Media State ---
let micStream = null;          // MediaStream from getUserMedia (audio)
let micSource = null;          // MediaStreamAudioSourceNode feeding the processor
let camStream = null;          // MediaStream from getUserMedia (video)
let screenStream = null;       // MediaStream from getDisplayMedia
let micScriptProcessor = null; // ScriptProcessorNode pushing mic PCM to the server
let audioCtx = null;           // shared AudioContext for capture and playback
// Capture/playback sample rate in Hz; must match what the server expects.
const SAMPLE_RATE = 48000;
// --- Remote Peer State ---
// Map<peerId (string), peer record built by createPeer()>
// Note: We can use a single AudioContext for all peers, but need separate scheduler times.
const peers = new Map();
// Lazily creates the shared playback/capture AudioContext, resuming it when
// the browser has auto-suspended it (e.g. before a user gesture).
function getAudioContext() {
    if (audioCtx === null) {
        const Ctor = window.AudioContext || window.webkitAudioContext;
        audioCtx = new Ctor({ sampleRate: SAMPLE_RATE });
    }
    if (audioCtx.state === 'suspended') {
        audioCtx.resume();
    }
    return audioCtx;
}
// --- WebSocket Setup ---
// One socket to the local bridge server; all media travels as binary frames.
const ws = new WebSocket(`ws://${location.host}/ws`);
ws.binaryType = 'arraybuffer';
ws.onopen = () => {
    statusEl.textContent = 'Connected';
    statusEl.style.color = '#4ade80'; // green
};
ws.onclose = () => {
    statusEl.textContent = 'Disconnected';
    statusEl.style.color = '#f87171'; // red
};
// Demultiplexes binary frames from the server:
//   [header(1)][idLen(1)][peerId bytes(idLen)][payload...]
// header: 0 = audio (f32 PCM), 1 = camera MJPEG, 2 = screen MJPEG.
ws.onmessage = (event) => {
    const raw = event.data;
    if (!(raw instanceof ArrayBuffer)) return;
    const bytes = new Uint8Array(raw);
    if (bytes.length < 2) return;
    const header = bytes[0];
    const idLen = bytes[1];
    if (bytes.length < 2 + idLen) return;
    // Decode the sender's peer id, then slice off the payload.
    const peerId = new TextDecoder().decode(bytes.subarray(2, 2 + idLen));
    const payload = raw.slice(2 + idLen);
    // Get or lazily create the peer's UI card and playback state.
    let peer = peers.get(peerId);
    if (peer === undefined) {
        peer = createPeer(peerId);
        peers.set(peerId, peer);
    }
    switch (header) {
        case 0:
            handleRemoteAudio(peer, payload);
            break;
        case 1:
            handleRemoteVideo(peer, payload, 'camera');
            break;
        case 2:
            handleRemoteVideo(peer, payload, 'screen');
            break;
    }
};
// Builds the DOM card for a newly-seen peer and returns its state record
// (image elements are created lazily on first frame of each kind).
function createPeer(peerId) {
    const card = document.createElement('div');
    card.className = 'peer-card';
    card.id = `peer-${peerId}`;

    const header = document.createElement('div');
    header.className = 'peer-header';
    header.innerHTML = `<span>${peerId}</span> <span class="indicators"></span>`;

    const mediaContainer = document.createElement('div');
    mediaContainer.className = 'peer-media';

    card.appendChild(header);
    card.appendChild(mediaContainer);
    remoteStreamsContainer.appendChild(card);

    return {
        id: peerId,
        mediaContainer,
        camImg: null,           // <img> for camera frames, created on demand
        screenImg: null,        // <img> for screen frames, created on demand
        nextStartTime: 0,       // audio playback cursor (AudioContext time)
        lastActivity: Date.now(),
    };
}
// Schedules a chunk of remote f32 PCM for gapless playback on the shared
// AudioContext, maintaining a per-peer play cursor (peer.nextStartTime).
function handleRemoteAudio(peer, arrayBuffer) {
    const ctx = getAudioContext();
    const samples = new Float32Array(arrayBuffer);
    const audioBuffer = ctx.createBuffer(1, samples.length, SAMPLE_RATE);
    audioBuffer.copyToChannel(samples, 0);

    const source = ctx.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(ctx.destination);

    const now = ctx.currentTime;
    if (peer.nextStartTime < now) {
        // Cursor fell behind real time: restart just ahead of "now".
        peer.nextStartTime = now + 0.02; // Reduced from 0.05
    }
    if (peer.nextStartTime > now + 0.5) {
        // Queue ran too far ahead; snap back to cut latency. Already-queued
        // buffers keep playing and briefly overlap — sources aren't tracked yet.
        console.warn("High latency detected, resetting playhead", peer.nextStartTime - now);
        peer.nextStartTime = now + 0.02;
    }
    source.start(peer.nextStartTime);
    peer.nextStartTime += audioBuffer.duration;

    peer.lastActivity = Date.now();
    updatePeerStatus(peer, '🎤');
}
// Displays one MJPEG frame from a peer, creating the <img> element for the
// given kind ('camera' | 'screen') on first use. Each frame becomes a blob
// URL; the previous frame's URL is revoked once the new one loads — or fails
// to load — so blob memory doesn't accumulate.
function handleRemoteVideo(peer, arrayBuffer, kind) {
    const blob = new Blob([arrayBuffer], { type: 'image/jpeg' });
    const url = URL.createObjectURL(blob);
    let img = kind === 'camera' ? peer.camImg : peer.screenImg;
    if (!img) {
        img = document.createElement('img');
        img.className = kind; // 'camera' or 'screen'
        img.alt = `${kind} from ${peer.id}`;
        peer.mediaContainer.appendChild(img);
        if (kind === 'camera') peer.camImg = img;
        else peer.screenImg = img;
    }
    const prevUrl = img.src;
    const revokePrev = () => {
        if (prevUrl && prevUrl.startsWith('blob:')) {
            URL.revokeObjectURL(prevUrl);
        }
    };
    img.onload = revokePrev;
    // Fix: also revoke when the new frame fails to decode — otherwise a
    // corrupt frame leaks the previous blob URL for the lifetime of the page.
    img.onerror = revokePrev;
    img.src = url;
    peer.lastActivity = Date.now();
    updatePeerStatus(peer, kind === 'camera' ? '📷' : '🖥');
}
// Placeholder: per-peer activity indicators (the `.indicators` span in the
// card header) are not rendered yet; callers already pass the icon to show
// once this is implemented.
function updatePeerStatus(peer, icon) {
    // optionally update status indicators in header
}
// --- Local Capture Controls ---
// Each toggle starts/stops its capture pipeline and updates the button label
// so the label always describes the next action.
toggleMicBtn.addEventListener('click', async () => {
    if (micStream) {
        stopMic();
        toggleMicBtn.classList.remove('active');
        toggleMicBtn.textContent = 'Start Microphone';
    } else {
        await startMic();
        toggleMicBtn.classList.add('active');
        toggleMicBtn.textContent = 'Stop Microphone';
    }
});
toggleCamBtn.addEventListener('click', async () => {
    if (camStream) {
        stopCam();
        toggleCamBtn.classList.remove('active');
        toggleCamBtn.textContent = 'Start Camera';
        localVideo.srcObject = null; // clear the local preview
    } else {
        await startCam();
        toggleCamBtn.classList.add('active');
        toggleCamBtn.textContent = 'Stop Camera';
    }
});
toggleScreenBtn.addEventListener('click', async () => {
    if (screenStream) {
        stopScreen();
        toggleScreenBtn.classList.remove('active');
        toggleScreenBtn.textContent = 'Start Screen Share';
        localVideo.srcObject = null; // clear the local preview
    } else {
        await startScreen();
        toggleScreenBtn.classList.add('active');
        toggleScreenBtn.textContent = 'Stop Screen Share';
    }
});
// Captures the microphone and streams raw f32 PCM to the server as binary
// frames: [3][little-endian f32 samples...].
async function startMic() {
    const ctx = getAudioContext();
    try {
        micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        micSource = ctx.createMediaStreamSource(micStream);
        // 2048-sample buffer keeps latency low (~43 ms at 48 kHz).
        micScriptProcessor = ctx.createScriptProcessor(2048, 1, 1);
        micScriptProcessor.onaudioprocess = (e) => {
            if (!micStream || ws.readyState !== WebSocket.OPEN) return;
            const samples = e.inputBuffer.getChannelData(0);
            const out = new DataView(new ArrayBuffer(1 + samples.length * 4));
            out.setUint8(0, 3); // header: mic PCM
            // A Float32Array view can't start at byte offset 1 (alignment),
            // so copy sample-by-sample as explicit little-endian floats.
            for (let i = 0; i < samples.length; i++) {
                out.setFloat32(1 + i * 4, samples[i], true);
            }
            ws.send(out.buffer);
        };
        micSource.connect(micScriptProcessor);
        // Route through a zero-gain node: the processor only runs while
        // connected to the destination, but we don't want audible feedback.
        const silence = ctx.createGain();
        silence.gain.value = 0;
        micScriptProcessor.connect(silence);
        silence.connect(ctx.destination);
    } catch (err) {
        console.error('Error starting mic:', err);
    }
}
// Tears down the microphone capture graph and releases the device.
function stopMic() {
    if (micStream !== null) {
        for (const track of micStream.getTracks()) track.stop();
        micStream = null;
    }
    if (micSource !== null) {
        micSource.disconnect();
        micSource = null;
    }
    if (micScriptProcessor !== null) {
        micScriptProcessor.onaudioprocess = null;
        micScriptProcessor.disconnect();
        micScriptProcessor = null;
    }
}
// Starts 640x480 camera capture, previews it locally, and begins streaming
// JPEG frames with header byte 4.
async function startCam() {
    try {
        const constraints = { video: { width: 640, height: 480 } };
        camStream = await navigator.mediaDevices.getUserMedia(constraints);
        localVideo.srcObject = camStream;
        startVideoSender(camStream, 4);
    } catch (err) {
        console.error('Error starting camera:', err);
        alert('Failed to access camera');
    }
}
// Stops camera capture and releases the device.
function stopCam() {
    if (camStream !== null) {
        for (const track of camStream.getTracks()) track.stop();
        camStream = null;
    }
}
// Starts screen capture, previews it locally, and streams JPEG frames with
// header byte 5. Resets the toggle button if the user ends sharing through
// the browser's own "stop sharing" UI.
async function startScreen() {
    try {
        screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
        // Local preview shows whichever source started most recently.
        localVideo.srcObject = screenStream;
        startVideoSender(screenStream, 5);
        const [track] = screenStream.getVideoTracks();
        track.onended = () => {
            stopScreen();
            toggleScreenBtn.classList.remove('active');
            toggleScreenBtn.textContent = 'Start Screen Share';
        };
    } catch (err) {
        console.error('Error starting screen:', err);
    }
}
// Stops screen capture and releases the display stream.
function stopScreen() {
    if (screenStream !== null) {
        for (const track of screenStream.getTracks()) track.stop();
        screenStream = null;
    }
}
// Pulls frames from `stream` at ~10 FPS through an off-DOM <video>, JPEG-
// encodes each via a canvas, and sends it as [headerByte][JPEG bytes].
// headerByte: 4 = camera, 5 = screen. The loop stops itself when the
// stream becomes inactive.
function startVideoSender(stream, headerByte) {
    const video = document.createElement('video');
    video.srcObject = stream;
    video.muted = true;       // never route captured audio to the speakers
    video.playsInline = true;
    // Fix: play() returns a promise that can reject under autoplay policies;
    // log it instead of surfacing an unhandled promise rejection.
    video.play().catch((err) => console.error('video.play failed:', err));
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    const sendFrame = () => {
        if (!stream.active) return; // capture ended — stop the loop
        if (video.readyState === video.HAVE_ENOUGH_DATA) {
            canvas.width = video.videoWidth;
            canvas.height = video.videoHeight;
            ctx.drawImage(video, 0, 0);
            canvas.toBlob((blob) => {
                if (!blob) return;
                const reader = new FileReader();
                reader.onloadend = () => {
                    if (ws.readyState === WebSocket.OPEN) {
                        const jpeg = reader.result;
                        // Prefix the JPEG bytes with the 1-byte kind header.
                        const framed = new Uint8Array(1 + jpeg.byteLength);
                        framed[0] = headerByte;
                        framed.set(new Uint8Array(jpeg), 1);
                        ws.send(framed.buffer);
                    }
                };
                reader.readAsArrayBuffer(blob);
            }, 'image/jpeg', 0.6);
        }
        setTimeout(sendFrame, 100); // 10 FPS
    };
    sendFrame();
}

35
web/index.html Normal file
View File

@@ -0,0 +1,35 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>P2P Chat Media</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<div class="container">
<header>
<h1>P2P Chat Media</h1>
<div id="status" class="status">Connecting...</div>
</header>
<main>
<div id="remote-streams" class="remote-streams">
<!-- Dynamic peer streams will appear here -->
</div>
<div class="local-preview">
<h3>Local Preview</h3>
<video id="local-video" autoplay muted></video>
</div>
<div class="audio-controls">
<button id="toggle-mic">Start Microphone</button>
<button id="toggle-cam">Start Camera</button>
<button id="toggle-screen">Start Screen Share</button>
</div>
</main>
</div>
<script src="app.js"></script>
</body>
</html>

121
web/style.css Normal file
View File

@@ -0,0 +1,121 @@
/* Base Styles */
body {
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
background-color: #1e1e1e;
color: #e0e0e0;
margin: 0;
padding: 20px;
display: flex;
justify-content: center;
}
.container {
width: 100%;
max-width: 1200px;
}
header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 20px;
border-bottom: 1px solid #333;
padding-bottom: 10px;
}
.status {
font-weight: bold;
color: #fca5a5; /* Red-ish for disconnected */
}
/* Remote Streams Grid */
.remote-streams {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 20px;
margin-bottom: 30px;
}
.peer-card {
background-color: #2d2d2d;
border-radius: 8px;
overflow: hidden;
box-shadow: 0 4px 6px rgba(0,0,0,0.3);
display: flex;
flex-direction: column;
}
.peer-header {
background-color: #333;
padding: 8px 12px;
font-size: 0.9em;
font-weight: bold;
color: #aaa;
display: flex;
justify-content: space-between;
}
.peer-media {
position: relative;
width: 100%;
/* Aspect ratio placeholder or min-height */
min-height: 200px;
background-color: #000;
display: flex;
justify-content: center;
align-items: center;
}
.peer-media img {
width: 100%;
height: auto;
display: block;
max-height: 400px;
object-fit: contain;
}
.peer-media .placeholder {
color: #555;
font-size: 0.8em;
}
/* Local Preview */
.local-preview {
margin-top: 20px;
border-top: 1px solid #333;
padding-top: 20px;
opacity: 0.6;
}
.local-preview video {
width: 200px;
border-radius: 4px;
background: #000;
}
/* Controls */
.audio-controls {
display: flex;
gap: 15px;
margin-top: 20px;
justify-content: center;
}
button {
padding: 10px 20px;
border: none;
border-radius: 4px;
background-color: #3b82f6;
color: white;
font-weight: bold;
cursor: pointer;
transition: background-color 0.2s;
}
button:hover {
background-color: #2563eb;
}
button.active {
background-color: #ef4444; /* Red for Stop */
}