// Video chat client: captures mic/screen media, relays it to the backend over
// a WebSocket, and renders remote peers' H.264 video and PCM audio.
// --- DOM Element References ---
const toggleMicBtn = document.getElementById('toggle-mic');            // mic on/off button
const toggleCamBtn = document.getElementById('toggle-cam');            // camera on/off button
const toggleScreenBtn = document.getElementById('toggle-screen');      // screen-share button
const statusOverlay = document.getElementById('status-overlay');       // full-screen connection overlay
const connectionStatus = document.getElementById('connection-status'); // header wifi indicator
const videoGrid = document.getElementById('video-grid');               // container for peer cards
const localVideo = document.getElementById('local-video');             // local self-view <video>

// --- Local Media State ---
let micStream = null;          // MediaStream from getUserMedia (audio)
let micSource = null;          // MediaStreamAudioSourceNode feeding the script processor
let camStream = null;          // local camera MediaStream (capture is backend-driven; see startCam)
let screenStream = null;       // local getDisplayMedia MediaStream
let micScriptProcessor = null; // ScriptProcessorNode serializing mic PCM onto the socket
let audioCtx = null;           // shared AudioContext, lazily created by getAudioContext()
const SAMPLE_RATE = 48000;     // PCM sample rate for both capture and playback

// Video Encoding State
let videoEncoder = null;     // camera VideoEncoder (unused while capture is backend-side)
let screenEncoder = null;    // screen-share VideoEncoder (WebCodecs H.264)
let screenCanvasLoop = null; // requestAnimationFrame handle for the local screen preview
let frameCounter = 0;        // NOTE(review): never written in this file — possibly vestigial

// --- Remote Peer State ---
// Map<peerId, {
//   id: string,
//   nextStartTime: number,  // AudioContext time at which the next audio chunk starts
//   cam: { card, canvas, decoder, statusElement, activityTimeout } | null,
//   screen: { card, canvas, decoder, statusElement, activityTimeout } | null,
// }>
const peers = new Map();
// Returns the shared AudioContext used for playback, creating it lazily and
// resuming it if the browser auto-suspended it (autoplay policy).
function getAudioContext() {
    if (!audioCtx) {
        audioCtx = new (window.AudioContext || window.webkitAudioContext)({
            sampleRate: SAMPLE_RATE,
        });
    }
    if (audioCtx.state === 'suspended') {
        // resume() is async; a failure (e.g. no user gesture yet) must not
        // surface as an unhandled rejection — log and carry on.
        audioCtx.resume().catch((err) => console.warn('[Audio] resume failed:', err));
    }
    return audioCtx;
}
// --- WebSocket Setup ---
const ws = new WebSocket(`ws://${location.host}/ws`);
ws.binaryType = 'arraybuffer'; // all media frames arrive as binary

// Connected: hide the blocking overlay and flip the header indicator.
ws.onopen = () => {
    statusOverlay.style.display = 'none';
    connectionStatus.innerHTML = '<span class="material-icons">wifi</span>';
    connectionStatus.classList.add('connected');
    connectionStatus.title = "Connected";
};

// Disconnected: show the overlay again.
// NOTE(review): the text says "Reconnecting..." but no reconnect logic is
// visible in this file — confirm a reload/retry exists elsewhere.
ws.onclose = () => {
    statusOverlay.style.display = 'flex';
    statusOverlay.querySelector('h2').textContent = "Disconnected. Reconnecting...";
    connectionStatus.innerHTML = '<span class="material-icons">wifi_off</span>';
    connectionStatus.classList.remove('connected');
    connectionStatus.title = "Disconnected";
};
// Incoming binary frame layout:
//   [1B header] [1B idLen] [idLen bytes peerId (UTF-8)] [payload]
// header: 0 = audio (PCM), 1 = camera video (H.264), 2 = screen video (H.264).
// Unknown headers and non-binary messages are ignored.
ws.onmessage = (event) => {
    const data = event.data;
    if (!(data instanceof ArrayBuffer)) return;

    const view = new DataView(data);
    if (view.byteLength < 2) return; // too short for the fixed header

    const header = view.getUint8(0);
    const idLen = view.getUint8(1);
    if (view.byteLength < 2 + idLen) return; // truncated peer ID

    // Extract sender ID
    const idBytes = new Uint8Array(data, 2, idLen);
    const peerId = new TextDecoder().decode(idBytes);

    // Extract payload (everything after header + ID)
    const payload = data.slice(2 + idLen);

    // Get or lazily register the peer
    let peer = peers.get(peerId);
    if (!peer) {
        peer = {
            id: peerId,
            nextStartTime: 0,
            cam: null,
            screen: null
        };
        peers.set(peerId, peer);
        handlePeerConnected(peer);
    }

    switch (header) {
        case 0: // Audio
            handleRemoteAudio(peer, payload);
            break;
        case 1: // Video (Camera)
            handleRemoteVideo(peer, payload, 'cam');
            break;
        case 2: // Screen
            handleRemoteVideo(peer, payload, 'screen');
            break;
        default:
            break; // unknown channel — drop silently
    }
};
// Returns the render slot for (peer, type), creating the DOM card and its
// H.264 VideoDecoder on first use.
//
// peer: entry from the peers map (or a placeholder object carrying `id`).
// type: 'cam' | 'screen' — which media surface this card shows.
// Returns { card, canvas, decoder, statusElement, activityTimeout }, cached
// on peer[type] so subsequent calls are free.
function getOrCreateCard(peer, type) {
    if (peer[type]) return peer[type];

    const card = document.createElement('div');
    card.className = 'peer-card';
    card.id = `peer-${peer.id}-${type}`;

    // Video canvas element — decoded frames are painted here.
    const canvas = document.createElement('canvas');
    canvas.className = 'peer-video';
    card.appendChild(canvas);
    // Hoisted: grab the 2D context once instead of on every decoded frame.
    const ctx = canvas.getContext('2d');

    // Overlay info (activity dot + short peer label)
    const info = document.createElement('div');
    info.className = 'peer-info';
    let label = peer.id.substring(0, 8);
    if (type === 'screen') label += " (Screen)";

    info.innerHTML = `
        <div class="peer-status" id="status-${peer.id}-${type}"></div>
        <span class="peer-name">${label}</span>
    `;
    card.appendChild(info);

    videoGrid.appendChild(card);

    // Initialize VideoDecoder
    const decoder = new VideoDecoder({
        output: (frame) => {
            console.debug(`[Decoder] Frame decoded: ${frame.displayWidth}x${frame.displayHeight}`);
            canvas.width = frame.displayWidth;
            canvas.height = frame.displayHeight;
            ctx.drawImage(frame, 0, 0);
            // VideoFrames must be closed promptly or the decoder stalls.
            frame.close();

            // Currently a no-op for video (only audio lights the dot); kept for symmetry.
            updatePeerActivity(cardObj, false);
        },
        error: (e) => {
            console.error(`[Decoder] Error (${type}):`, e);
            statusOverlay.style.display = 'flex';
            let statusText = `Decoding H.264 from ${peer.id}...`;
            statusOverlay.querySelector('h2').textContent = `${statusText} Video Decoder Error: ${e.message}`;
        }
    });

    console.log(`[Decoder] Configuring H.264 decoder for ${peer.id} (${type})`);
    try {
        decoder.configure({
            codec: 'avc1.42E01E', // H.264 Constrained Baseline
            optimizeForLatency: true
        });
    } catch (err) {
        console.error(`[Decoder] Configuration failed:`, err);
    }

    // cardObj is declared after the decoder but only read from its callbacks,
    // which never fire before this function returns.
    const cardObj = {
        card: card,
        canvas: canvas,
        decoder: decoder,
        statusElement: info.querySelector('.peer-status'),
        activityTimeout: null
    };

    peer[type] = cardObj;
    return cardObj;
}
// Plays one chunk of remote PCM audio and pulses the peer's activity dot.
// arrayBuffer holds raw float32 mono samples at SAMPLE_RATE.
function handleRemoteAudio(peer, arrayBuffer) {
    // Audio is shown on the camera card; create a placeholder card if needed.
    const cardObj = getOrCreateCard(peer, 'cam');

    const ctx = getAudioContext();
    const samples = new Float32Array(arrayBuffer);
    const audioBuffer = ctx.createBuffer(1, samples.length, SAMPLE_RATE);
    audioBuffer.copyToChannel(samples, 0);

    const source = ctx.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(ctx.destination);

    // Schedule gaplessly after the previous chunk. Re-anchor 20ms ahead of
    // "now" when playback fell behind, or when more than 0.5s of latency has
    // accumulated (catch-up). The two conditions are mutually exclusive, so
    // folding them into one branch is equivalent to the original two checks.
    const now = ctx.currentTime;
    const fellBehind = peer.nextStartTime < now;
    const tooFarAhead = peer.nextStartTime > now + 0.5;
    if (fellBehind || tooFarAhead) {
        peer.nextStartTime = now + 0.02;
    }

    source.start(peer.nextStartTime);
    peer.nextStartTime += audioBuffer.duration;

    // Visual speaking indicator
    updatePeerActivity(cardObj, true);
}
// Decodes one remote H.264 chunk onto the peer's card of the given type
// ('cam' | 'screen').
// Payload format: [1 byte frame type] [N bytes encoded chunk]
// Frame Type: 0 = Key, 1 = Delta
function handleRemoteVideo(peer, arrayBuffer, type) {
    const cardObj = getOrCreateCard(peer, type);

    const view = new DataView(arrayBuffer);
    const isKey = view.getUint8(0) === 0;
    const chunkData = arrayBuffer.slice(1);

    // A freshly-created decoder cannot decode deltas until it has seen a key
    // frame; feeding them anyway produces decoder errors right after joining
    // mid-stream. Drop deltas until the first key frame arrives.
    if (isKey) {
        cardObj.sawKeyFrame = true;
    } else if (!cardObj.sawKeyFrame) {
        return;
    }

    const chunk = new EncodedVideoChunk({
        type: isKey ? 'key' : 'delta',
        timestamp: performance.now() * 1000, // microseconds; local clock — ideally derive from a sender sequence
        data: chunkData
    });

    try {
        if (cardObj.decoder.state === 'configured') {
            cardObj.decoder.decode(chunk);
        } else {
            console.warn(`[Decoder] Not configured yet, dropping chunk (Key: ${isKey})`);
        }
    } catch (e) {
        console.error("[Decoder] Decode exception:", e);
    }
}
// Pulses the "speaking" dot on a card. Only audio activity lights the dot;
// video activity (isAudio === false) is deliberately ignored.
function updatePeerActivity(cardObj, isAudio) {
    if (!isAudio) return;

    cardObj.statusElement.classList.add('speaking');

    // Debounce: keep the dot lit while chunks keep arriving, and clear it
    // 200ms after the last one.
    if (cardObj.activityTimeout) {
        clearTimeout(cardObj.activityTimeout);
    }
    cardObj.activityTimeout = setTimeout(() => {
        cardObj.statusElement.classList.remove('speaking');
    }, 200);
}
// Runs once when a peer first appears in the peers map (including the
// synthetic 'local' preview peer).
function handlePeerConnected(peer) {
    console.log(`[App] Peer connected: ${peer.id}`);

    // Make sure a camera card exists to host the peer's general info.
    const cardObj = getOrCreateCard(peer, 'cam');

    // Only the local preview card gets the blue dot and friendlier label.
    if (peer.id !== 'local') return;

    const statusDot = cardObj.card.querySelector('.peer-status');
    if (statusDot) statusDot.style.backgroundColor = '#3b82f6';

    const nameLabel = cardObj.card.querySelector('.peer-name');
    if (nameLabel) nameLabel.textContent = "Local Preview (H.264)";
}

// --- Local Capture Controls ---
// Syncs a toolbar toggle button's icon and 'active' CSS class with its state.
// active === true shows iconOn and adds the class; false shows iconOff.
function updateButton(btn, active, iconOn, iconOff) {
    const iconSpan = btn.querySelector('.material-icons');
    btn.classList[active ? 'add' : 'remove']('active');
    iconSpan.textContent = active ? iconOn : iconOff;
}
// Mic toggle: stop if capturing; otherwise request the mic and only mark the
// button active when capture actually started (startMic returns a boolean).
toggleMicBtn.addEventListener('click', async () => {
    if (micStream) {
        stopMic();
        updateButton(toggleMicBtn, false, 'mic', 'mic_off');
    } else {
        const success = await startMic();
        if (success) {
            updateButton(toggleMicBtn, true, 'mic', 'mic_off');
        }
    }
});
// Camera toggle. Capture is backend-driven (see startCam), so reflect the
// actual camStream state instead of unconditionally marking the button active
// — previously the button lit up even though no stream was ever produced.
toggleCamBtn.addEventListener('click', async () => {
    if (camStream) {
        stopCam();
        updateButton(toggleCamBtn, false, 'videocam', 'videocam_off');
        localVideo.srcObject = null;
    } else {
        await startCam();
        // Only show "active" if startCam actually produced a stream.
        updateButton(toggleCamBtn, Boolean(camStream), 'videocam', 'videocam_off');
    }
});
// Screen-share toggle. startScreen manages the button itself (including its
// error path, which sets the button inactive), so reflect the resulting
// screenStream state here — the previous unconditional `true` overrode the
// inactive state whenever startScreen failed.
toggleScreenBtn.addEventListener('click', async () => {
    if (screenStream) {
        stopScreen();
        updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
        // Restore the camera self-view if it is still running.
        if (camStream) localVideo.srcObject = camStream;
        else localVideo.srcObject = null;
    } else {
        await startScreen();
        updateButton(toggleScreenBtn, Boolean(screenStream), 'stop_screen_share', 'screen_share');
    }
});
// Requests mic access and streams raw PCM to the backend over the WebSocket.
// Wire format per message: [1 byte header = 3] [N little-endian float32 samples].
// Returns true on success, false if access was denied or capture failed.
async function startMic() {
    const ctx = getAudioContext();
    try {
        micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        micSource = ctx.createMediaStreamSource(micStream);
        micScriptProcessor = ctx.createScriptProcessor(2048, 1, 1);

        micScriptProcessor.onaudioprocess = (event) => {
            if (!micStream) return;                       // capture stopped; ignore stragglers
            if (ws.readyState !== WebSocket.OPEN) return; // nowhere to send

            const samples = event.inputBuffer.getChannelData(0);
            const packet = new ArrayBuffer(1 + samples.length * 4);
            const view = new DataView(packet);
            view.setUint8(0, 3); // Header 3 = Mic
            samples.forEach((sample, i) => {
                view.setFloat32(1 + i * 4, sample, true); // explicit little-endian
            });
            ws.send(packet);
        };

        micSource.connect(micScriptProcessor);

        // Route through a zero-gain node so processing runs without audible
        // local feedback.
        const mute = ctx.createGain();
        mute.gain.value = 0;
        micScriptProcessor.connect(mute);
        mute.connect(ctx.destination);
        return true;
    } catch (err) {
        console.error('Error starting mic:', err);
        alert('Mic access failed: ' + err.message);
        return false;
    }
}
// Tears down mic capture: stops all tracks, detaches the capture callback,
// and disconnects the audio-graph nodes. Safe to call when nothing is active.
function stopMic() {
    if (micStream) {
        for (const track of micStream.getTracks()) {
            track.stop();
        }
        micStream = null;
    }
    if (micSource) {
        micSource.disconnect();
        micSource = null;
    }
    if (micScriptProcessor) {
        // Drop the callback first so no packet is sent mid-teardown.
        micScriptProcessor.onaudioprocess = null;
        micScriptProcessor.disconnect();
        micScriptProcessor = null;
    }
}
// Camera "start" entry point.
//
// Capture and H.264 encoding were moved to the backend: the TUI /cam command
// spawns native capture (ffmpeg), which broadcasts encoded frames that this
// page merely receives and renders. There is currently no HTTP/WS control
// endpoint for the web UI to start backend capture, so this button cannot
// trigger it — it only points the user at the TUI. (The old flow — browser
// getUserMedia → VideoEncoder → WS upload — was removed when encoding moved
// server-side.)
//
// TODO(review): add a backend control endpoint, or replace this button with a
// passive status indicator.
async function startCam() {
    try {
        alert("Please use /cam in the terminal to start the camera (Backend Encoding).");

    } catch (err) {
        console.error('Error starting camera:', err);
        alert('Failed to start camera');
        updateButton(toggleCamBtn, false, 'videocam', 'videocam_off');
    }
}
// Stops local camera capture (if any). The encoder is deliberately left
// untouched; stopping the track halts the feed and the encoder can be reused
// or reconfigured on the next start.
function stopCam() {
    if (camStream) {
        for (const track of camStream.getTracks()) {
            track.stop();
        }
        camStream = null;
    }
    if (videoEncoder) {
        // Intentionally not closed — see note above.
    }
}
// Pumps frames from a MediaStreamTrackProcessor reader into an encoder until
// the stream ends. Every frame is closed in a finally block — VideoFrames
// hold scarce media resources, and the original leaked the frame if
// encoder.encode() threw.
async function readLoop(reader, encoder) {
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        try {
            if (encoder.state === "configured") {
                encoder.encode(value);
            }
        } finally {
            value.close();
        }
    }
}
// Starts screen sharing in hybrid mode: the browser captures and H.264-encodes
// the display, previews it locally on a canvas, and relays encoded chunks to
// the backend over the WebSocket for distribution to peers.
// Outgoing payload: [1B header = 5 (screen)] [1B frame type: 0=key, 1=delta] [chunk].
async function startScreen() {
    try {
        screenStream = await navigator.mediaDevices.getDisplayMedia({
            video: {
                cursor: "always"
            },
            audio: false
        });

        const track = screenStream.getVideoTracks()[0];
        const { width, height } = track.getSettings();

        // 1. Local preview: draw the captured stream onto the 'local' screen card.
        // Reuse the registered 'local' peer so repeated starts hit the
        // getOrCreateCard cache — the old code passed a fresh { id: 'local' }
        // object each time, which appended a duplicate card on every start.
        let localPeer = peers.get('local');
        if (!localPeer) {
            localPeer = { id: 'local', nextStartTime: 0, cam: null, screen: null };
            peers.set('local', localPeer);
        }
        const localCardObj = getOrCreateCard(localPeer, 'screen');
        const canvas = localCardObj.canvas;
        const ctx = canvas.getContext('2d');

        // Hidden <video> used as a drawable source for the canvas preview.
        const tempVideo = document.createElement('video');
        tempVideo.autoplay = true;
        tempVideo.srcObject = screenStream;
        tempVideo.muted = true;
        await tempVideo.play();

        // Canvas drawing loop; exits when the source video pauses/ends.
        function drawLoop() {
            if (tempVideo.paused || tempVideo.ended) return;
            if (canvas.width !== tempVideo.videoWidth || canvas.height !== tempVideo.videoHeight) {
                canvas.width = tempVideo.videoWidth;
                canvas.height = tempVideo.videoHeight;
            }
            ctx.drawImage(tempVideo, 0, 0);
            screenCanvasLoop = requestAnimationFrame(drawLoop);
        }
        drawLoop();

        // 2. Encode and relay to the backend (for peers).
        screenEncoder = new VideoEncoder({
            output: (chunk, metadata) => {
                const buffer = new Uint8Array(chunk.byteLength);
                chunk.copyTo(buffer);

                // Frame type on the wire: key = 0, delta = 1.
                const isKey = chunk.type === 'key';
                const frameType = isKey ? 0 : 1;

                const payload = new Uint8Array(1 + 1 + buffer.length);
                payload[0] = 5; // Screen Header
                payload[1] = frameType;
                payload.set(buffer, 2);

                if (ws && ws.readyState === WebSocket.OPEN) {
                    ws.send(payload);
                }
            },
            error: (e) => console.error("Screen Encoder Error:", e)
        });

        screenEncoder.configure({
            codec: 'avc1.42E01E', // H.264 Baseline
            width: width,
            height: height,
            bitrate: 3_000_000, // 3Mbps
            framerate: 30
        });

        // Pump captured frames into the encoder; intentionally not awaited —
        // this is a long-running loop that ends with the track.
        const processor = new MediaStreamTrackProcessor({ track });
        const reader = processor.readable.getReader();
        readLoop(reader, screenEncoder);

        // Icon pair fixed to match the toggle handler: on shows
        // 'stop_screen_share', off shows 'screen_share' (previously both
        // arguments were 'stop_screen_share').
        updateButton(toggleScreenBtn, true, 'stop_screen_share', 'screen_share');

        // Clean up when the user stops sharing via the browser's own UI.
        track.onended = () => stopScreen();

    } catch (err) {
        console.error('Error starting screen:', err);
        alert(`Failed to start screen share: ${err.message}. \n(Make sure to run /screen in terminal first!)`);
        updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
    }
}
// Stops screen sharing: halts tracks, closes the encoder, cancels the preview
// loop, and clears the local preview canvas. Safe to call repeatedly.
async function stopScreen() {
    if (screenStream) {
        screenStream.getTracks().forEach(t => t.stop());
        screenStream = null;
    }
    if (screenEncoder) {
        screenEncoder.close();
        screenEncoder = null;
    }
    if (screenCanvasLoop) {
        cancelAnimationFrame(screenCanvasLoop);
        screenCanvasLoop = null;
    }

    // The old code called getOrCreateCard({ id: 'local' }, 'screen') with a
    // fresh object, which bypassed the per-peer cache and created a brand-new
    // duplicate card on every stop just to clear it. Only clear the canvas of
    // an already-existing local screen card.
    const localCardObj = peers.get('local')?.screen;
    if (localCardObj) {
        const ctx = localCardObj.canvas.getContext('2d');
        ctx.clearRect(0, 0, localCardObj.canvas.width, localCardObj.canvas.height);
    }

    updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
}