// Browser client: captures mic / camera / screen locally and exchanges
// framed binary media (PCM audio + JPEG video) with peers over a WebSocket.
// --- DOM References ---
const toggleMicBtn = document.getElementById('toggle-mic');
const toggleCamBtn = document.getElementById('toggle-cam');
const toggleScreenBtn = document.getElementById('toggle-screen');
const statusEl = document.getElementById('status');                       // connection status label
const remoteStreamsContainer = document.getElementById('remote-streams'); // holds one card per remote peer
const localVideo = document.getElementById('local-video');                // self-preview (camera or screen)

// --- Local Media State ---
let micStream = null;           // MediaStream from getUserMedia({ audio })
let micSource = null;           // MediaStreamAudioSourceNode feeding the processor
let camStream = null;           // MediaStream from getUserMedia({ video })
let screenStream = null;        // MediaStream from getDisplayMedia
let micScriptProcessor = null;  // ScriptProcessorNode that serializes PCM chunks
let audioCtx = null;            // shared AudioContext, lazily created by getAudioContext()
const SAMPLE_RATE = 48000;      // Hz; playback rate assumed for incoming remote audio

// --- Remote Peer State ---
// Map<peerId (string), peer record created by createPeer()>
// One AudioContext is shared across all peers, but each record carries its
// own playback scheduler time (nextStartTime) so streams don't interfere.
const peers = new Map();
|
|
|
|
// Lazily create — and, if the browser auto-suspended it, resume — the single
// AudioContext shared by local capture and all remote playback.
function getAudioContext() {
  if (audioCtx === null) {
    const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
    audioCtx = new AudioContextCtor({ sampleRate: SAMPLE_RATE });
  }
  if (audioCtx.state === 'suspended') {
    audioCtx.resume();
  }
  return audioCtx;
}
|
|
|
|
// --- WebSocket Setup ---
const ws = new WebSocket(`ws://${location.host}/ws`);
ws.binaryType = 'arraybuffer';

// Reflect connection state in the status label (green = up, red = down).
const showConnectionState = (text, color) => {
  statusEl.textContent = text;
  statusEl.style.color = color;
};

ws.onopen = () => showConnectionState('Connected', '#4ade80');
ws.onclose = () => showConnectionState('Disconnected', '#f87171');
|
|
|
// Incoming binary frame layout: [header(1)][idLen(1)][peerId (idLen UTF-8 bytes)][payload…]
// header: 0 = audio (Float32 PCM), 1 = camera JPEG, 2 = screen JPEG.
// Decoder is hoisted — the original allocated a new TextDecoder per message.
const peerIdDecoder = new TextDecoder();

ws.onmessage = (event) => {
  const data = event.data;
  if (!(data instanceof ArrayBuffer)) return;

  const view = new DataView(data);
  if (view.byteLength < 2) return; // too short for even the fixed header

  const header = view.getUint8(0);
  const idLen = view.getUint8(1);
  if (view.byteLength < 2 + idLen) return; // truncated peer id

  const peerId = peerIdDecoder.decode(new Uint8Array(data, 2, idLen));
  const payload = data.slice(2 + idLen);

  // Get or lazily create the peer record (and its DOM card).
  let peer = peers.get(peerId);
  if (!peer) {
    peer = createPeer(peerId);
    peers.set(peerId, peer);
  }

  if (header === 0) {
    handleRemoteAudio(peer, payload);
  } else if (header === 1) {
    handleRemoteVideo(peer, payload, 'camera');
  } else if (header === 2) {
    handleRemoteVideo(peer, payload, 'screen');
  }
  // Unknown headers are ignored (but the peer card above was still created,
  // matching the original behavior).
};
|
|
|
|
/**
 * Create the DOM card and state record for a newly seen remote peer.
 * @param {string} peerId - peer identifier received over the wire (untrusted).
 * @returns {object} peer record: { id, mediaContainer, camImg, screenImg,
 *   nextStartTime, lastActivity }.
 */
function createPeer(peerId) {
  const card = document.createElement('div');
  card.className = 'peer-card';
  card.id = `peer-${peerId}`;

  const header = document.createElement('div');
  header.className = 'peer-header';
  // Build the header via textContent instead of innerHTML: peerId arrives
  // over the network, so interpolating it into markup was an XSS vector.
  const nameSpan = document.createElement('span');
  nameSpan.textContent = peerId;
  const indicators = document.createElement('span');
  indicators.className = 'indicators';
  header.appendChild(nameSpan);
  header.append(' '); // preserve the whitespace text node between the spans
  header.appendChild(indicators);
  card.appendChild(header);

  // Container for this peer's (possibly multiple) media streams.
  const mediaContainer = document.createElement('div');
  mediaContainer.className = 'peer-media';
  card.appendChild(mediaContainer);

  remoteStreamsContainer.appendChild(card);

  return {
    id: peerId,
    mediaContainer,
    camImg: null,            // <img> for camera frames, created on first frame
    screenImg: null,         // <img> for screen frames, created on first frame
    nextStartTime: 0,        // AudioContext time at which the next audio chunk starts
    lastActivity: Date.now(),
  };
}
|
|
|
|
/**
 * Schedule a chunk of remote PCM audio for (near-)gapless playback.
 * @param {object} peer - peer record from createPeer; nextStartTime is read/updated.
 * @param {ArrayBuffer} arrayBuffer - raw Float32 mono samples, assumed to be
 *   little-endian at SAMPLE_RATE (matches the sender; platform-endian view
 *   is used here — fine on all mainstream little-endian hosts).
 */
function handleRemoteAudio(peer, arrayBuffer) {
  // Guard malformed payloads: Float32Array throws if byteLength isn't a
  // multiple of 4, and createBuffer throws on a zero-length buffer.
  if (arrayBuffer.byteLength === 0 || arrayBuffer.byteLength % 4 !== 0) return;

  const ctx = getAudioContext();
  const float32Data = new Float32Array(arrayBuffer);
  const buffer = ctx.createBuffer(1, float32Data.length, SAMPLE_RATE);
  buffer.copyToChannel(float32Data, 0);

  const source = ctx.createBufferSource();
  source.buffer = buffer;
  source.connect(ctx.destination);

  const now = ctx.currentTime;

  // Fell behind realtime: restart the playhead just ahead of "now".
  if (peer.nextStartTime < now) {
    peer.nextStartTime = now + 0.02;
  }

  // Queued too far ahead: snap back to cap latency. This can overlap with
  // already-scheduled buffers (sources aren't tracked for stopping yet),
  // but it lets playback catch up.
  if (peer.nextStartTime > now + 0.5) {
    console.warn("High latency detected, resetting playhead", peer.nextStartTime - now);
    peer.nextStartTime = now + 0.02;
  }

  source.start(peer.nextStartTime);
  peer.nextStartTime += buffer.duration;

  peer.lastActivity = Date.now();
  updatePeerStatus(peer, '🎤');
}
|
|
|
|
/**
 * Display one JPEG video frame from a remote peer, creating the <img> lazily.
 * @param {object} peer - peer record from createPeer.
 * @param {ArrayBuffer} arrayBuffer - JPEG-encoded frame bytes.
 * @param {'camera'|'screen'} kind - which stream the frame belongs to.
 */
function handleRemoteVideo(peer, arrayBuffer, kind) {
  const blob = new Blob([arrayBuffer], { type: 'image/jpeg' });
  const url = URL.createObjectURL(blob);

  let img = kind === 'camera' ? peer.camImg : peer.screenImg;

  if (!img) {
    img = document.createElement('img');
    img.className = kind; // 'camera' or 'screen'
    img.alt = `${kind} from ${peer.id}`;
    peer.mediaContainer.appendChild(img);
    if (kind === 'camera') peer.camImg = img;
    else peer.screenImg = img;
  }

  // Revoke the previous frame's blob URL once the new frame has loaded OR
  // failed to decode — revoking only in onload leaked a URL per bad frame.
  const prevUrl = img.src;
  const releasePrevUrl = () => {
    if (prevUrl && prevUrl.startsWith('blob:')) {
      URL.revokeObjectURL(prevUrl);
    }
  };
  img.onload = releasePrevUrl;
  img.onerror = releasePrevUrl;
  img.src = url;

  peer.lastActivity = Date.now();
  updatePeerStatus(peer, kind === 'camera' ? '📷' : '🖥');
}
|
|
|
|
// Placeholder hook: intended to surface per-stream activity icons (🎤/📷/🖥)
// in the peer card's ".indicators" header span. Currently a no-op — callers
// pass the icon but nothing is rendered yet.
function updatePeerStatus(peer, icon) {
  // optionally update status indicators in header
}
|
|
|
|
// --- Local Capture Controls ---
|
|
|
|
// Toggle microphone capture. startMic() swallows getUserMedia failures, so
// only flip the button into its "active" state if a stream was actually
// acquired — the original showed "Stop Microphone" even after a failure.
toggleMicBtn.addEventListener('click', async () => {
  if (micStream) {
    stopMic();
    toggleMicBtn.classList.remove('active');
    toggleMicBtn.textContent = 'Start Microphone';
  } else {
    await startMic();
    if (micStream) {
      toggleMicBtn.classList.add('active');
      toggleMicBtn.textContent = 'Stop Microphone';
    }
  }
});
|
|
|
|
// Toggle camera capture. Two fixes over the original:
//  - only mark the button active when startCam() actually acquired a stream
//    (it swallows getUserMedia failures), and
//  - on stop, fall back to the screen-share preview if one is running
//    instead of unconditionally blanking the local preview.
toggleCamBtn.addEventListener('click', async () => {
  if (camStream) {
    stopCam();
    toggleCamBtn.classList.remove('active');
    toggleCamBtn.textContent = 'Start Camera';
    localVideo.srcObject = screenStream; // null when no screen share is active
  } else {
    await startCam();
    if (camStream) {
      toggleCamBtn.classList.add('active');
      toggleCamBtn.textContent = 'Stop Camera';
    }
  }
});
|
|
|
|
// Toggle screen sharing. getDisplayMedia rejects when the user cancels the
// picker (startScreen() swallows that), so only mark the button active when
// a stream exists; on stop, restore the camera preview if one is running.
toggleScreenBtn.addEventListener('click', async () => {
  if (screenStream) {
    stopScreen();
    toggleScreenBtn.classList.remove('active');
    toggleScreenBtn.textContent = 'Start Screen Share';
    localVideo.srcObject = camStream; // null when no camera is active
  } else {
    await startScreen();
    if (screenStream) {
      toggleScreenBtn.classList.add('active');
      toggleScreenBtn.textContent = 'Stop Screen Share';
    }
  }
});
|
|
|
|
/**
 * Start microphone capture and stream raw PCM over the WebSocket.
 * Wire format per chunk: [header byte 3][little-endian Float32 samples…].
 * NOTE: ScriptProcessorNode is deprecated in favor of AudioWorklet but is
 * kept here for simplicity/compatibility.
 */
async function startMic() {
  const ctx = getAudioContext();
  try {
    micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    micSource = ctx.createMediaStreamSource(micStream);
    // 2048-sample buffer: lower latency than the 4096 default.
    micScriptProcessor = ctx.createScriptProcessor(2048, 1, 1);

    micScriptProcessor.onaudioprocess = (e) => {
      if (!micStream || ws.readyState !== WebSocket.OPEN) return;
      const inputData = e.inputBuffer.getChannelData(0);

      // A Float32Array view can't start at byte offset 1 (offsets must be
      // 4-aligned), so copy sample-by-sample through a DataView with an
      // explicit little-endian layout.
      const buffer = new ArrayBuffer(1 + inputData.length * 4);
      const view = new DataView(buffer);
      view.setUint8(0, 3); // header 3 = mic audio
      for (let i = 0; i < inputData.length; i++) {
        view.setFloat32(1 + i * 4, inputData[i], true);
      }
      ws.send(buffer);
    };

    micSource.connect(micScriptProcessor);
    // A ScriptProcessor only fires onaudioprocess while connected to the
    // destination; route it through a zero-gain node so the local mic is
    // inaudible (no feedback).
    const mute = ctx.createGain();
    mute.gain.value = 0;
    micScriptProcessor.connect(mute);
    mute.connect(ctx.destination);
  } catch (err) {
    console.error('Error starting mic:', err);
    // Release anything partially acquired so state and UI stay consistent.
    stopMic();
  }
}
|
|
|
|
// Tear down microphone capture: stop the tracks and disconnect the audio graph.
// Safe to call at any time, including after a partial startMic() failure.
function stopMic() {
  micStream?.getTracks().forEach((track) => track.stop());
  micStream = null;

  micSource?.disconnect();
  micSource = null;

  if (micScriptProcessor) {
    micScriptProcessor.onaudioprocess = null;
    micScriptProcessor.disconnect();
    micScriptProcessor = null;
  }
}
|
|
|
|
// Acquire the camera at 640x480, show it in the local preview, and begin
// pumping JPEG frames to the server (wire header byte 4 = camera).
async function startCam() {
  const constraints = { video: { width: 640, height: 480 } };
  try {
    camStream = await navigator.mediaDevices.getUserMedia(constraints);
    localVideo.srcObject = camStream;
    startVideoSender(camStream, 4);
  } catch (err) {
    console.error('Error starting camera:', err);
    alert('Failed to access camera');
  }
}
|
|
|
|
// Stop every camera track and clear the local camera state.
function stopCam() {
  if (!camStream) return;
  for (const track of camStream.getTracks()) {
    track.stop();
  }
  camStream = null;
}
|
|
|
|
// Start screen capture and begin sending frames (wire header byte 5 = screen).
// The browser's own "Stop sharing" UI ends the track, which resets the button.
async function startScreen() {
  try {
    screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
    // The screen takes over the local preview while active.
    localVideo.srcObject = screenStream;

    startVideoSender(screenStream, 5);

    const [screenTrack] = screenStream.getVideoTracks();
    screenTrack.onended = () => {
      stopScreen();
      toggleScreenBtn.classList.remove('active');
      toggleScreenBtn.textContent = 'Start Screen Share';
    };
  } catch (err) {
    console.error('Error starting screen:', err);
  }
}
|
|
|
|
// Stop every screen-share track and clear the local screen state.
function stopScreen() {
  if (!screenStream) return;
  for (const track of screenStream.getTracks()) {
    track.stop();
  }
  screenStream = null;
}
|
|
|
|
/**
 * Pump JPEG snapshots of a MediaStream to the server at ~10 FPS.
 * Wire format per frame: [headerByte][JPEG bytes…].
 * @param {MediaStream} stream - source stream; the loop exits once it goes inactive.
 * @param {number} headerByte - protocol tag (4 = camera, 5 = screen).
 */
function startVideoSender(stream, headerByte) {
  const video = document.createElement('video');
  video.srcObject = stream;
  // play() returns a promise; an autoplay refusal would otherwise surface
  // as an unhandled rejection.
  video.play().catch((err) => console.warn('Video sender play() failed:', err));

  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d');

  const sendFrame = () => {
    if (!stream.active) {
      video.srcObject = null; // release the element's hold on the stopped stream
      return;
    }
    if (video.readyState === video.HAVE_ENOUGH_DATA) {
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      ctx.drawImage(video, 0, 0);

      canvas.toBlob(async (blob) => {
        // blob can be null on a zero-sized canvas; skip that frame.
        if (!blob || ws.readyState !== WebSocket.OPEN) return;
        // blob.arrayBuffer() replaces the older FileReader round-trip.
        const jpegBytes = new Uint8Array(await blob.arrayBuffer());
        if (ws.readyState !== WebSocket.OPEN) return; // may have closed mid-await
        const frame = new Uint8Array(1 + jpegBytes.byteLength);
        frame[0] = headerByte;
        frame.set(jpegBytes, 1);
        ws.send(frame);
      }, 'image/jpeg', 0.6);
    }
    setTimeout(sendFrame, 100); // ~10 FPS
  };

  sendFrame();
}
|