diff --git a/client/index.html b/client/index.html
index 552ed77..8f03a78 100644
--- a/client/index.html
+++ b/client/index.html
@@ -57,11 +57,12 @@
+
-
+
diff --git a/client/js/app.js b/client/js/app.js
index 425856d..bab06a6 100644
--- a/client/js/app.js
+++ b/client/js/app.js
@@ -9,14 +9,24 @@ const seenEmitted = new Set(); // item IDs this session has already emitted seen
// BROADCAST STATE
let isBroadcasting = false;
let broadcastStream = null;
-let broadcastCanvas = null;
-let broadcastCtx = null;
-let broadcastInterval = null;
-let lastFrameData = null;
let broadcastChannel = 'general';
-let viewerCount = 0;
let audioCtx = null;
+// WebRTC — broadcaster maintains one peer connection per viewer
+// key: viewerId (socket id), value: RTCPeerConnection
+const peerConnections = new Map();
+
+// WebRTC — viewer holds a single peer connection to broadcaster
+let viewerPeerConnection = null;
+let broadcasterId = null;
+
+const STUN_SERVERS = {
+ iceServers: [
+ { urls: 'stun:stun.l.google.com:19302' },
+ { urls: 'stun:stun1.l.google.com:19302' }
+ ]
+};
+
const seenObserver = new IntersectionObserver((entries) => {
entries.forEach(entry => {
if (!entry.isIntersecting) return;
@@ -1695,13 +1705,11 @@ function closeAllBottomSheets() {
// ============================================================
async function startBroadcast() {
- // Check secure context first
if (!window.isSecureContext) {
showBroadcastSecureWarning();
return;
}
- // Check if someone else is already broadcasting
const statusRes = await fetch('/broadcast/status');
const status = await statusRes.json();
if (status.live) {
@@ -1709,24 +1717,27 @@ async function startBroadcast() {
return;
}
- // Ask which channel to save captures to
const channelNames = channels.map(c => c.name);
- const chosen = channelNames.includes('general') ? 'general' : channelNames[0];
- broadcastChannel = chosen;
+ broadcastChannel = channelNames.includes('general') ? 'general' : channelNames[0];
- // Request screen capture
let stream;
try {
stream = await navigator.mediaDevices.getDisplayMedia({
- video: { frameRate: 2 },
- audio: false
+ video: { frameRate: { ideal: 30 }, width: { ideal: 1920 }, height: { ideal: 1080 } },
+ audio: true
});
+
+ // Mix in microphone if available — failure is silent, broadcast continues without mic
+ try {
+ const micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
+ micStream.getAudioTracks().forEach(track => stream.addTrack(track));
+ } catch (e) {
+ // Mic denied or unavailable — continue without it
+ }
} catch (e) {
- // User cancelled or permission denied — silent exit
return;
}
- // Start broadcast on server
const startRes = await fetch('/broadcast/start', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -1743,21 +1754,11 @@ async function startBroadcast() {
broadcastStream = stream;
isBroadcasting = true;
- // Warm up AudioContext while we have a user gesture
try {
audioCtx = new (window.AudioContext || window.webkitAudioContext)();
await audioCtx.resume();
} catch (e) { }
- // Set up canvas for frame capture
- broadcastCanvas = document.createElement('canvas');
- broadcastCtx = broadcastCanvas.getContext('2d');
-
- const video = document.createElement('video');
- video.srcObject = stream;
- video.play();
-
- // Update start button
const startBtn = document.getElementById('startBroadcastBtn');
if (startBtn) {
startBtn.textContent = '⏹ Stop';
@@ -1765,23 +1766,6 @@ async function startBroadcast() {
startBtn.onclick = stopBroadcast;
}
- // Frame capture loop — change detection, ~2fps
- broadcastInterval = setInterval(() => {
- if (!video.videoWidth) return;
- broadcastCanvas.width = Math.min(video.videoWidth, 1280);
- broadcastCanvas.height = Math.min(video.videoHeight, 720);
- broadcastCtx.drawImage(video, 0, 0, broadcastCanvas.width, broadcastCanvas.height);
-
- const frame = broadcastCanvas.toDataURL('image/jpeg', 0.5);
-
- // Change detection — skip if frame is identical to last
- if (frame === lastFrameData) return;
- lastFrameData = frame;
-
- socket.emit('broadcast-frame', { frame });
- }, 500); // every 500ms = ~2fps
-
- // If user stops sharing via browser UI (clicks "Stop sharing")
stream.getVideoTracks()[0].addEventListener('ended', () => {
stopBroadcast();
});
@@ -1792,30 +1776,57 @@ async function stopBroadcast() {
isBroadcasting = false;
- // Stop the capture loop
- if (broadcastInterval) {
- clearInterval(broadcastInterval);
- broadcastInterval = null;
+ // Reset UI immediately — before any async calls that might fail
+ const startBtn = document.getElementById('startBroadcastBtn');
+ if (startBtn) {
+ startBtn.textContent = '📡 Broadcast';
+ startBtn.classList.remove('is-live');
+ startBtn.onclick = startBroadcast;
}
- // Stop all tracks
+ // Close all peer connections to viewers
+ peerConnections.forEach(pc => pc.close());
+ peerConnections.clear();
+
if (broadcastStream) {
broadcastStream.getTracks().forEach(t => t.stop());
broadcastStream = null;
}
- lastFrameData = null;
+ // Best effort — server may already be down
+ try { await fetch('/broadcast/end', { method: 'POST' }); } catch (e) { }
+}
- // Tell server broadcast is over
- await fetch('/broadcast/end', { method: 'POST' });
+// Called when broadcaster gets notified a new viewer joined
+async function handleViewerJoined(viewerId) {
+ if (!broadcastStream) return;
- // Reset start button
- const startBtn = document.getElementById('startBroadcastBtn');
- if (startBtn) {
- startBtn.textContent = '📡 Broadcast';
- startBtn.classList.remove('is-live');
- startBtn.onclick = startBroadcast;
- }
+ const pc = new RTCPeerConnection(STUN_SERVERS);
+ peerConnections.set(viewerId, pc);
+
+ // Add all tracks from the screen share stream
+ broadcastStream.getTracks().forEach(track => {
+ pc.addTrack(track, broadcastStream);
+ });
+
+ // Send ICE candidates to this viewer as they're discovered
+ pc.onicecandidate = ({ candidate }) => {
+ if (candidate) {
+ socket.emit('webrtc-ice', { candidate, targetId: viewerId });
+ }
+ };
+
+ pc.onconnectionstatechange = () => {
+ if (pc.connectionState === 'disconnected' || pc.connectionState === 'failed') {
+ pc.close();
+ peerConnections.delete(viewerId);
+ }
+ };
+
+ // Create and send offer to viewer
+ const offer = await pc.createOffer();
+ await pc.setLocalDescription(offer);
+ socket.emit('webrtc-offer', { offer, viewerId });
}
function showBroadcastBar(data) {
@@ -1860,14 +1871,16 @@ function joinBroadcast() {
label.textContent = `${document.getElementById('broadcastLabel').textContent}`;
panel.style.display = 'flex';
+ // Hide join button while viewing
+ const joinBtn = document.getElementById('broadcastJoinBtn');
+ if (joinBtn) joinBtn.style.display = 'none';
+
// Reset position to default top-right on each join
panel.style.left = '';
panel.style.top = '';
panel.style.right = '20px';
makeDraggable(panel);
-
- // Tell server we joined — get last frame immediately
socket.emit('broadcast-join');
}
@@ -1875,22 +1888,48 @@ function leaveBroadcast() {
const panel = document.getElementById('viewerPanel');
if (panel) panel.style.display = 'none';
- // Clear filmstrip
- const filmstrip = document.getElementById('filmstrip');
- if (filmstrip) filmstrip.innerHTML = '';
-}
+ // Restore join button
+ const joinBtn = document.getElementById('broadcastJoinBtn');
+ if (joinBtn) joinBtn.style.display = 'inline-block';
-async function captureToChannel() {
- const frame = document.getElementById('viewerFrame');
- if (!frame || !frame.src || frame.src === window.location.href) return;
+ // Reset audio button to muted state
+ const audioBtn = document.getElementById('audioToggleBtn');
+ if (audioBtn) {
+ audioBtn.textContent = '🔇';
+ audioBtn.title = 'Unmute audio';
+ }
- // Convert data URL to blob
- const res = await fetch(frame.src);
- const blob = await res.blob();
- const file = new File([blob], `broadcast-${Date.now()}.jpg`, { type: 'image/jpeg' });
+ // Clean up viewer peer connection
+ if (viewerPeerConnection) {
+ viewerPeerConnection.close();
+ viewerPeerConnection = null;
+ }
+ broadcasterId = null;
- uploadFiles([file], broadcastChannel);
+ // Stop video stream and reset audio on viewer element
+ const video = document.getElementById('viewerFrame');
+ if (video) {
+ video.srcObject = null;
+ video.muted = true;
+ }
+}
+async function captureToChannel() {
+ const video = document.getElementById('viewerFrame');
+ if (!video || !video.srcObject) return;
+
+ // Draw current video frame to canvas and convert to blob
+ const canvas = document.createElement('canvas');
+ canvas.width = video.videoWidth || 1280;
+ canvas.height = video.videoHeight || 720;
+ const ctx = canvas.getContext('2d');
+ ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
+
+ canvas.toBlob((blob) => {
+ if (!blob) return;
+ const file = new File([blob], `broadcast-${Date.now()}.jpg`, { type: 'image/jpeg' });
+ uploadFiles([file], broadcastChannel);
+ }, 'image/jpeg', 0.9);
}
function raiseHand() {
@@ -1930,6 +1969,16 @@ function toggleMinimize() {
panel.classList.toggle('minimized');
}
+function toggleAudio() {
+ const video = document.getElementById('viewerFrame');
+ const btn = document.getElementById('audioToggleBtn');
+ if (!video || !btn) return;
+
+ video.muted = !video.muted;
+ btn.textContent = video.muted ? '🔇' : '🔊';
+ btn.title = video.muted ? 'Unmute audio' : 'Mute audio';
+}
+
function makeDraggable(panel) {
const header = panel.querySelector('.viewer-panel-header');
if (!header) return;
@@ -1989,7 +2038,6 @@ socket.on('broadcast-ended', () => {
hideBroadcastBar();
leaveBroadcast();
- // Reset start button if we were the broadcaster
if (isBroadcasting) {
isBroadcasting = false;
const startBtn = document.getElementById('startBroadcastBtn');
@@ -2001,17 +2049,113 @@ socket.on('broadcast-ended', () => {
}
});
-socket.on('broadcast-frame', (data) => {
- const panel = document.getElementById('viewerPanel');
- if (!panel || panel.style.display === 'none') return;
+// ─── WEBRTC — BROADCASTER SIDE ───────────────────────────────
+
+// A new viewer joined — broadcaster initiates peer connection
+socket.on('webrtc-viewer-joined', ({ viewerId }) => {
+ if (!isBroadcasting) return;
+ handleViewerJoined(viewerId);
+});
+
+// Broadcaster receives answer from a viewer
+socket.on('webrtc-answer', async ({ answer, viewerId }) => {
+ const pc = peerConnections.get(viewerId);
+ if (!pc) return;
+ try {
+ await pc.setRemoteDescription(new RTCSessionDescription(answer));
+ } catch (e) {
+ console.error('Error setting remote description on broadcaster:', e);
+ }
+});
+
+// ─── WEBRTC — VIEWER SIDE ────────────────────────────────────
+
+// Viewer receives offer from broadcaster
+socket.on('webrtc-offer', async ({ offer, broadcasterId: bid }) => {
+ broadcasterId = bid;
- const viewer = document.getElementById('viewerFrame');
- if (viewer) viewer.src = data.frame;
+ // Clean up any existing peer connection
+ if (viewerPeerConnection) {
+ viewerPeerConnection.close();
+ viewerPeerConnection = null;
+ }
+
+ const pc = new RTCPeerConnection(STUN_SERVERS);
+ viewerPeerConnection = pc;
+
+ // When stream arrives, attach to video element
+ pc.ontrack = ({ streams }) => {
+ const video = document.getElementById('viewerFrame');
+ if (video && streams[0]) {
+ video.srcObject = streams[0];
+ }
+ };
+
+ // Send ICE candidates back to broadcaster
+ pc.onicecandidate = ({ candidate }) => {
+ if (candidate) {
+ socket.emit('webrtc-ice', { candidate, targetId: broadcasterId });
+ }
+ };
+
+ pc.onconnectionstatechange = () => {
+ if (pc.connectionState === 'disconnected' || pc.connectionState === 'failed') {
+ pc.close();
+ viewerPeerConnection = null;
+ }
+ };
+ await pc.setRemoteDescription(new RTCSessionDescription(offer));
+ const answer = await pc.createAnswer();
+ await pc.setLocalDescription(answer);
+ socket.emit('webrtc-answer', { answer, broadcasterId });
});
+// ICE candidates — both broadcaster and viewer use same handler
+socket.on('webrtc-ice', async ({ candidate, fromId }) => {
+ // Broadcaster receives from viewer
+ if (isBroadcasting) {
+ const pc = peerConnections.get(fromId);
+ if (pc) {
+ try { await pc.addIceCandidate(new RTCIceCandidate(candidate)); } catch (e) { }
+ }
+ return;
+ }
+
+ // Viewer receives from broadcaster
+ if (viewerPeerConnection) {
+ try { await viewerPeerConnection.addIceCandidate(new RTCIceCandidate(candidate)); } catch (e) { }
+ }
+});
+
+socket.on('disconnect', () => {
+ // Trigger the same cleanup flow as a normal broadcast-ended event
+ if (isBroadcasting || viewerPeerConnection) {
+ isBroadcasting = false;
+
+ peerConnections.forEach(pc => pc.close());
+ peerConnections.clear();
+
+ if (broadcastStream) {
+ broadcastStream.getTracks().forEach(t => t.stop());
+ broadcastStream = null;
+ }
+
+ const startBtn = document.getElementById('startBroadcastBtn');
+ if (startBtn) {
+ startBtn.textContent = '📡 Broadcast';
+ startBtn.classList.remove('is-live');
+ startBtn.onclick = startBroadcast;
+ }
+
+ hideBroadcastBar();
+ leaveBroadcast();
+ }
+});
+
+// ─── RAISE HAND ──────────────────────────────────────────────
+
socket.on('broadcast-reaction-received', ({ from }) => {
- // Only show to broadcaster
if (!isBroadcasting) return;
playChime();
diff --git a/server/server.js b/server/server.js
index a60dbb9..fff1ff8 100644
--- a/server/server.js
+++ b/server/server.js
@@ -955,24 +955,43 @@ io.on("connection", (socket) => {
io.emit("seen-update", { id, count });
});
- // Broadcast — frame relay
- socket.on("broadcast-frame", (data) => {
+ // WebRTC — viewer joins, notify broadcaster to initiate peer connection
+ socket.on("broadcast-join", () => {
if (!currentBroadcast) return;
- currentBroadcast.lastFrame = data.frame;
- // relay to everyone except the sender
- socket.broadcast.emit("broadcast-frame", { frame: data.frame });
+ // tell broadcaster a new viewer has joined, pass viewer's socket id
+ const broadcasterSocket = io.sockets.sockets.get(currentBroadcast.socketId);
+ if (broadcasterSocket) {
+ broadcasterSocket.emit("webrtc-viewer-joined", { viewerId: socket.id });
+ }
});
- // Broadcast — viewer joins, send last frame immediately
- socket.on("broadcast-join", () => {
- if (!currentBroadcast || !currentBroadcast.lastFrame) return;
- socket.emit("broadcast-frame", { frame: currentBroadcast.lastFrame });
+ // WebRTC — broadcaster sends offer to a specific viewer
+ socket.on("webrtc-offer", ({ offer, viewerId }) => {
+ const viewerSocket = io.sockets.sockets.get(viewerId);
+ if (viewerSocket) {
+ viewerSocket.emit("webrtc-offer", { offer, broadcasterId: socket.id });
+ }
+ });
+
+ // WebRTC — viewer sends answer back to broadcaster
+ socket.on("webrtc-answer", ({ answer, broadcasterId }) => {
+ const broadcasterSocket = io.sockets.sockets.get(broadcasterId);
+ if (broadcasterSocket) {
+ broadcasterSocket.emit("webrtc-answer", { answer, viewerId: socket.id });
+ }
+ });
+
+ // WebRTC — ICE candidate exchange, both directions
+ socket.on("webrtc-ice", ({ candidate, targetId }) => {
+ const targetSocket = io.sockets.sockets.get(targetId);
+ if (targetSocket) {
+ targetSocket.emit("webrtc-ice", { candidate, fromId: socket.id });
+ }
});
- // Broadcast — raise hand, relay only to broadcaster
+ // Broadcast — raise hand; emitted to all clients, who filter so only the broadcaster reacts
socket.on("broadcast-reaction", ({ from }) => {
if (!currentBroadcast) return;
- // find broadcaster's socket and emit only to them
io.emit("broadcast-reaction-received", { from });
});