370 lines
11 KiB
JavaScript
370 lines
11 KiB
JavaScript
// --- DOM Elements ---
const byId = (id) => document.getElementById(id);

const toggleMicBtn = byId('toggle-mic');
const toggleCamBtn = byId('toggle-cam');
const toggleScreenBtn = byId('toggle-screen');
const statusOverlay = byId('status-overlay');
const connectionStatus = byId('connection-status');
const videoGrid = byId('video-grid');
const localVideo = byId('local-video');
|
// --- Local Media State ---
let micStream = null;           // MediaStream from getUserMedia (audio)
let micSource = null;           // MediaStreamAudioSourceNode feeding the processor
let camStream = null;           // MediaStream from getUserMedia (video)
let screenStream = null;        // MediaStream from getDisplayMedia
let micScriptProcessor = null;  // ScriptProcessorNode that packetizes mic audio
let audioCtx = null;            // shared AudioContext, lazily created by getAudioContext()
const SAMPLE_RATE = 48000;      // Hz; remote audio payloads are played back at this rate

// --- Remote Peer State ---
// Map<peerId, {
//   id: string,
//   nextStartTime: number,  // AudioContext time at which the next audio chunk starts
//   cam:    { card, imgElement, statusElement, activityTimeout } | null,
//   screen: { card, imgElement, statusElement, activityTimeout } | null,
// }>
// (card objects are created lazily by getOrCreateCard)
const peers = new Map();
|
// Lazily create — and auto-resume — the shared AudioContext used for playback.
function getAudioContext() {
  if (audioCtx === null) {
    const Ctor = window.AudioContext || window.webkitAudioContext;
    audioCtx = new Ctor({ sampleRate: SAMPLE_RATE });
  }
  // Autoplay policies may leave the context suspended until a user gesture.
  if (audioCtx.state === 'suspended') {
    audioCtx.resume();
  }
  return audioCtx;
}
|
|
|
|
// --- WebSocket Setup ---
const ws = new WebSocket(`ws://${location.host}/ws`);
ws.binaryType = 'arraybuffer';

// Sync the connection pill (icon, color class, tooltip) with the socket state.
const setConnectionUi = (connected) => {
  connectionStatus.innerHTML = `<span class="material-icons">${connected ? 'wifi' : 'wifi_off'}</span>`;
  connectionStatus.classList.toggle('connected', connected);
  connectionStatus.title = connected ? "Connected" : "Disconnected";
};

ws.onopen = () => {
  statusOverlay.style.display = 'none';
  setConnectionUi(true);
};

ws.onclose = () => {
  // NOTE(review): the overlay promises "Reconnecting..." but no reconnect
  // logic is visible in this file — confirm it exists elsewhere.
  statusOverlay.style.display = 'flex';
  statusOverlay.querySelector('h2').textContent = "Disconnected. Reconnecting...";
  setConnectionUi(false);
};
|
// Binary frame layout: [u8 type][u8 idLen][idLen bytes: peer id][payload]
ws.onmessage = (event) => {
  const data = event.data;
  if (!(data instanceof ArrayBuffer)) return;

  const view = new DataView(data);
  if (view.byteLength < 2) return;

  const header = view.getUint8(0);
  const idLen = view.getUint8(1);
  if (view.byteLength < 2 + idLen) return;

  // Decode the sender's id
  const peerId = new TextDecoder().decode(new Uint8Array(data, 2, idLen));

  // Everything after the id is the media payload
  const payload = data.slice(2 + idLen);

  // Look up — or lazily register — the sending peer
  let peer = peers.get(peerId);
  if (!peer) {
    peer = { id: peerId, nextStartTime: 0, cam: null, screen: null };
    peers.set(peerId, peer);
  }

  switch (header) {
    case 0: // Audio
      handleRemoteAudio(peer, payload);
      break;
    case 1: // Video (Camera)
      handleRemoteVideo(peer, payload, 'cam');
      break;
    case 2: // Screen share renders in its own card
      handleRemoteVideo(peer, payload, 'screen');
      break;
  }
};
|
|
// Return the existing card object for this peer/stream type, or build a new
// card (image + name/status overlay) and attach it to the video grid.
// type is 'cam' or 'screen'.
function getOrCreateCard(peer, type) {
  if (peer[type]) return peer[type];

  const card = document.createElement('div');
  card.className = 'peer-card';
  card.id = `peer-${peer.id}-${type}`;

  // Video/Image element (remote frames arrive as individual images)
  const img = document.createElement('img');
  img.className = 'peer-video';
  img.alt = `${type} from ${peer.id}`;
  card.appendChild(img);

  // Overlay info. Built with DOM APIs and textContent rather than innerHTML
  // so a maliciously crafted peer id can never inject markup (XSS).
  const info = document.createElement('div');
  info.className = 'peer-info';

  const status = document.createElement('div');
  status.className = 'peer-status';
  status.id = `status-${peer.id}-${type}`;

  let label = peer.id.substring(0, 8);
  if (type === 'screen') label += " (Screen)";
  const nameSpan = document.createElement('span');
  nameSpan.className = 'peer-name';
  nameSpan.textContent = label;

  info.appendChild(status);
  info.appendChild(nameSpan);
  card.appendChild(info);

  videoGrid.appendChild(card);

  const cardObj = {
    card: card,
    imgElement: img,
    statusElement: status,
    activityTimeout: null   // debounce handle for the "speaking" indicator
  };

  peer[type] = cardObj;
  return cardObj;
}
|
|
|
// Play one chunk of remote PCM audio and flash the peer's speaking indicator.
// arrayBuffer: raw float32 samples, mono, at SAMPLE_RATE.
function handleRemoteAudio(peer, arrayBuffer) {
  // Audio is associated with the camera card; create a placeholder if absent.
  const cardObj = getOrCreateCard(peer, 'cam');

  // Guard against empty or truncated packets: Float32Array requires a byte
  // length that is a multiple of 4, and createBuffer throws on length 0.
  if (arrayBuffer.byteLength === 0 || arrayBuffer.byteLength % 4 !== 0) return;

  const ctx = getAudioContext();
  // NOTE(review): this reads the payload in platform byte order; the sender
  // writes little-endian floats, which matches all mainstream clients.
  const float32Data = new Float32Array(arrayBuffer);
  const buffer = ctx.createBuffer(1, float32Data.length, SAMPLE_RATE);
  buffer.copyToChannel(float32Data, 0);

  const source = ctx.createBufferSource();
  source.buffer = buffer;
  source.connect(ctx.destination);

  // Schedule chunks back-to-back for gapless playback.
  const now = ctx.currentTime;
  if (peer.nextStartTime < now) {
    peer.nextStartTime = now + 0.02; // small lead-in after a playback gap
  }
  // Latency catch-up: if the queue drifted more than 0.5s ahead, resync.
  if (peer.nextStartTime > now + 0.5) {
    peer.nextStartTime = now + 0.02;
  }

  source.start(peer.nextStartTime);
  peer.nextStartTime += buffer.duration;

  // Visual speaking indicator
  updatePeerActivity(cardObj, true);
}
|
|
|
|
// Display one remote video frame (a WebP image) in the peer's card.
// type is 'cam' or 'screen'.
function handleRemoteVideo(peer, arrayBuffer, type) {
  const cardObj = getOrCreateCard(peer, type);

  const blob = new Blob([arrayBuffer], { type: 'image/webp' });
  const url = URL.createObjectURL(blob);

  // Revoke the previous frame's object URL once the new frame has either
  // loaded or failed. Without the error path, a frame that fails to decode
  // would leak its predecessor's blob URL for the lifetime of the page.
  const prevUrl = cardObj.imgElement.src;
  const revokePrev = () => {
    if (prevUrl && prevUrl.startsWith('blob:')) {
      URL.revokeObjectURL(prevUrl);
    }
  };
  cardObj.imgElement.onload = revokePrev;
  cardObj.imgElement.onerror = revokePrev;
  cardObj.imgElement.src = url;

  updatePeerActivity(cardObj, false);
}
|
|
|
|
// Light up the peer's "speaking" dot while audio packets keep arriving;
// it turns off 200ms after the last packet. Video frames are a no-op here.
function updatePeerActivity(cardObj, isAudio) {
  if (!isAudio) return;
  const status = cardObj.statusElement;
  status.classList.add('speaking');
  // Restart the debounce timer on every audio packet.
  if (cardObj.activityTimeout) clearTimeout(cardObj.activityTimeout);
  cardObj.activityTimeout = setTimeout(() => status.classList.remove('speaking'), 200);
}
|
|
|
|
// --- Local Capture Controls ---
|
|
|
|
// Sync a control button with its capture state: toggle the 'active' class
// and swap the material icon between its on/off glyphs.
function updateButton(btn, active, iconOn, iconOff) {
  const iconSpan = btn.querySelector('.material-icons');
  btn.classList[active ? 'add' : 'remove']('active');
  iconSpan.textContent = active ? iconOn : iconOff;
}
|
|
|
|
// Mic toggle: tear down if capturing, otherwise request access and only
// flip the button to active when capture actually started.
toggleMicBtn.addEventListener('click', async () => {
  if (micStream) {
    stopMic();
    updateButton(toggleMicBtn, false, 'mic', 'mic_off');
    return;
  }
  const started = await startMic();
  if (started) {
    updateButton(toggleMicBtn, true, 'mic', 'mic_off');
  }
});
|
|
|
|
// Camera toggle: tear down if capturing, otherwise request access. The
// button is only marked active when the camera actually started —
// startCam swallows permission errors, leaving camStream null.
toggleCamBtn.addEventListener('click', async () => {
  if (camStream) {
    stopCam();
    updateButton(toggleCamBtn, false, 'videocam', 'videocam_off');
    localVideo.srcObject = null;
  } else {
    await startCam();
    if (camStream) {
      updateButton(toggleCamBtn, true, 'videocam', 'videocam_off');
    }
  }
});
|
|
|
|
// Screen-share toggle. The button is only marked active when sharing
// actually started — startScreen swallows the error thrown when the user
// cancels the browser's share picker, leaving screenStream null.
toggleScreenBtn.addEventListener('click', async () => {
  if (screenStream) {
    stopScreen();
    updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
    // Restore the camera self-view if it is still running.
    if (camStream) localVideo.srcObject = camStream;
    else localVideo.srcObject = null;
  } else {
    await startScreen();
    if (screenStream) {
      updateButton(toggleScreenBtn, true, 'stop_screen_share', 'screen_share');
    }
  }
});
|
|
|
|
// Capture the microphone and stream raw little-endian float32 PCM chunks
// to the server, prefixed with header byte 3.
// Returns true on success, false if access failed.
async function startMic() {
  const ctx = getAudioContext();
  try {
    micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    micSource = ctx.createMediaStreamSource(micStream);
    micScriptProcessor = ctx.createScriptProcessor(2048, 1, 1);

    micScriptProcessor.onaudioprocess = (event) => {
      if (!micStream) return;
      if (ws.readyState !== WebSocket.OPEN) return;
      const samples = event.inputBuffer.getChannelData(0);
      // Packet layout: [u8 header=3][float32le samples...]
      const packet = new ArrayBuffer(1 + samples.length * 4);
      const view = new DataView(packet);
      view.setUint8(0, 3); // Header 3 = Mic
      for (let i = 0; i < samples.length; i++) {
        view.setFloat32(1 + i * 4, samples[i], true); // explicit little-endian
      }
      ws.send(packet);
    };

    micSource.connect(micScriptProcessor);
    // A ScriptProcessor only fires when routed to the destination; pass it
    // through a zero-gain node so the mic is not audible locally.
    const silencer = ctx.createGain();
    silencer.gain.value = 0;
    micScriptProcessor.connect(silencer);
    silencer.connect(ctx.destination);
    return true;
  } catch (err) {
    console.error('Error starting mic:', err);
    alert('Mic access failed: ' + err.message);
    return false;
  }
}
|
|
|
|
// Release every mic resource: capture tracks, source node, and processor.
// Nulling micStream also makes any in-flight onaudioprocess callback bail.
function stopMic() {
  if (micStream !== null) {
    for (const track of micStream.getTracks()) track.stop();
    micStream = null;
  }
  if (micSource !== null) {
    micSource.disconnect();
    micSource = null;
  }
  if (micScriptProcessor !== null) {
    micScriptProcessor.onaudioprocess = null;
    micScriptProcessor.disconnect();
    micScriptProcessor = null;
  }
}
|
|
|
|
// Capture the camera at 640x480, show it in the local preview, and start
// streaming frames (header byte 4). Returns true on success, false if
// access failed — mirroring startMic's contract so callers can gate UI
// state on the result.
async function startCam() {
  try {
    camStream = await navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480 } });
    localVideo.srcObject = camStream;
    startVideoSender(camStream, 4); // 4 = Camera
    return true;
  } catch (err) {
    console.error('Error starting camera:', err);
    alert('Camera access failed');
    return false;
  }
}
|
|
|
|
// Stop every camera track; the frame-sender loop ends on its own once the
// stream goes inactive.
function stopCam() {
  if (camStream === null) return;
  for (const track of camStream.getTracks()) track.stop();
  camStream = null;
}
|
|
|
|
// Capture the screen, show it in the local preview, and start streaming
// frames (header byte 5). Returns true on success, false if the user
// cancelled the share picker — consistent with startMic/startCam.
async function startScreen() {
  try {
    screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
    localVideo.srcObject = screenStream;

    startVideoSender(screenStream, 5); // 5 = Screen
    // The browser fires `ended` when the user stops sharing from the
    // browser's own UI; keep our state and button in sync.
    const track = screenStream.getVideoTracks()[0];
    if (track) {
      track.onended = () => {
        stopScreen();
        updateButton(toggleScreenBtn, false, 'screen_share', 'screen_share');
      };
    }
    return true;
  } catch (err) {
    // The user cancelling the picker lands here; log but don't alert.
    console.error('Error starting screen:', err);
    return false;
  }
}
|
|
|
|
// Stop every screen-share track; the frame-sender loop ends on its own
// once the stream goes inactive.
function stopScreen() {
  if (screenStream === null) return;
  for (const track of screenStream.getTracks()) track.stop();
  screenStream = null;
}
|
|
|
|
// Pump frames from `stream` through an off-DOM canvas and send them over
// the websocket as WebP images prefixed with a 1-byte type header.
// The loop self-terminates when the stream becomes inactive.
function startVideoSender(stream, headerByte) {
  const video = document.createElement('video');
  video.srcObject = stream;
  video.muted = true;       // never play capture audio locally
  video.playsInline = true; // required on iOS Safari for off-screen playback
  // play() returns a promise; handle rejection (e.g. a transient autoplay
  // block) so it doesn't surface as an unhandled rejection.
  video.play().catch((err) => console.error('Video sender playback failed:', err));

  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d');

  const sendFrame = () => {
    if (!stream.active) return; // stream stopped: end the loop
    if (video.readyState === video.HAVE_ENOUGH_DATA) {
      // Resize only when dimensions actually change — assigning
      // width/height clears and reallocates the canvas backing store.
      if (canvas.width !== video.videoWidth || canvas.height !== video.videoHeight) {
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
      }
      ctx.drawImage(video, 0, 0);

      canvas.toBlob((blob) => {
        if (!blob) return;
        const reader = new FileReader();
        reader.onloadend = () => {
          if (ws.readyState === WebSocket.OPEN) {
            const arrayBuffer = reader.result;
            // Packet layout: [u8 headerByte][webp bytes...]
            const buffer = new ArrayBuffer(1 + arrayBuffer.byteLength);
            const view = new Uint8Array(buffer);
            view[0] = headerByte;
            view.set(new Uint8Array(arrayBuffer), 1);
            ws.send(buffer);
          }
        };
        reader.readAsArrayBuffer(blob);
      }, 'image/webp', 0.6);
    }
    setTimeout(sendFrame, 100); // ~10 FPS
  };
  sendFrame();
}
|