feat(streaming): add Screen Streaming plugin with WebRTC

New plugin: browser-based screen sharing via Chrome Screen Capture API.
Multi-stream grid layout (Rustdesk-style tiles) with live previews.

- Server: WebSocket signaling at /ws/streaming (SDP/ICE relay)
- Server: http.createServer for WebSocket attachment
- Frontend: StreamingTab with broadcaster/viewer modes
- Frontend: tile grid, fullscreen viewer, LIVE badges
- Supports multiple concurrent streams
- Peer-to-peer video via WebRTC (no video through server)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Daniel 2026-03-07 00:39:49 +01:00
parent 9ff8a38547
commit 29bcf67121
5 changed files with 1094 additions and 2 deletions

View file

@@ -0,0 +1,492 @@
import { useState, useEffect, useRef, useCallback } from 'react';
import './streaming.css';
// ── Types ──
interface StreamInfo {
id: string;
broadcasterName: string;
title: string;
startedAt: string;
viewerCount: number;
}
interface ViewState {
streamId: string;
phase: 'connecting' | 'connected' | 'error';
error?: string;
}
const RTC_CONFIG: RTCConfiguration = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' },
],
};
// ── Elapsed time helper ──
/**
 * Format the time elapsed since `startedAt` as "m:ss", or "h:mm:ss" once at
 * least one full hour has passed. A negative difference (clock skew, future
 * timestamp) is clamped to zero, yielding "0:00".
 */
function formatElapsed(startedAt: string): string {
  const totalSeconds = Math.max(0, Math.floor((Date.now() - new Date(startedAt).getTime()) / 1000));
  const hours = Math.floor(totalSeconds / 3600);
  const minutes = Math.floor((totalSeconds % 3600) / 60);
  const seconds = totalSeconds % 60;
  const pad = (v: number): string => String(v).padStart(2, '0');
  return hours > 0
    ? `${hours}:${pad(minutes)}:${pad(seconds)}`
    : `${minutes}:${pad(seconds)}`;
}
// ── Component ──
/**
 * StreamingTab — screen-sharing UI that plays two roles in one component.
 *
 * Broadcaster: captures the screen via getDisplayMedia() and keeps one
 * RTCPeerConnection per viewer (this side creates the SDP offers).
 * Viewer: keeps a single RTCPeerConnection to the broadcaster and renders
 * the remote track in a fullscreen overlay.
 *
 * Signaling (SDP/ICE) is relayed over the WebSocket at /ws/streaming; the
 * video itself flows peer-to-peer and never through the server.
 *
 * @param data - server-pushed payload; when `data.streams` is present it is
 *   taken as the authoritative list of active streams (shape assumed from
 *   usage here — TODO confirm against the producer).
 */
export default function StreamingTab({ data }: { data: any }) {
  // ── State ──
  const [streams, setStreams] = useState<StreamInfo[]>([]);
  // Display name, persisted to localStorage (see effect below).
  const [userName, setUserName] = useState(() => localStorage.getItem('streaming_name') || '');
  const [streamTitle, setStreamTitle] = useState('Screen Share');
  const [error, setError] = useState<string | null>(null);
  // ID of our own broadcast, assigned by the server ('broadcast_started').
  const [myStreamId, setMyStreamId] = useState<string | null>(null);
  const [isBroadcasting, setIsBroadcasting] = useState(false);
  // True between clicking "start" and the server acknowledging the broadcast.
  const [starting, setStarting] = useState(false);
  // Non-null while watching someone else's stream (switches to overlay render).
  const [viewing, setViewing] = useState<ViewState | null>(null);
  const [, setTick] = useState(0); // dummy state: bumped each second to re-render elapsed times
  // ── Refs (mutable connection state; changes must not trigger renders) ──
  const wsRef = useRef<WebSocket | null>(null);
  // Our client ID as assigned by the signaling server ('welcome' message).
  const clientIdRef = useRef<string>('');
  const localStreamRef = useRef<MediaStream | null>(null);
  const localVideoRef = useRef<HTMLVideoElement | null>(null);
  const remoteVideoRef = useRef<HTMLVideoElement | null>(null);
  /** Broadcaster: one PeerConnection per viewer */
  const peerConnectionsRef = useRef<Map<string, RTCPeerConnection>>(new Map());
  /** Viewer: single PeerConnection to broadcaster */
  const viewerPcRef = useRef<RTCPeerConnection | null>(null);
  const reconnectTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  // Exponential-backoff delay for WS reconnects (reset to 1s on open).
  const reconnectDelayRef = useRef(1000);
  // ── Elapsed time ticker ──
  // Re-render once per second while anything is live so the formatElapsed()
  // output in the tiles stays current.
  useEffect(() => {
    const hasActive = streams.length > 0 || isBroadcasting;
    if (!hasActive) return;
    const iv = setInterval(() => setTick(t => t + 1), 1000);
    return () => clearInterval(iv);
  }, [streams.length, isBroadcasting]);
  // ── SSE data → update stream list ──
  useEffect(() => {
    if (data?.streams) {
      setStreams(data.streams);
    }
  }, [data]);
  // ── Save name to localStorage ──
  useEffect(() => {
    if (userName) localStorage.setItem('streaming_name', userName);
  }, [userName]);
  // ── WebSocket connect ──
  // Opens (or reuses) the signaling socket. On close, reconnects with
  // exponential backoff (1s doubling up to a 10s cap) while we are
  // broadcasting or viewing.
  // NOTE(review): ws.onmessage closes over the handleWsMessage instance from
  // the render in which the socket was created; if isBroadcasting/viewing
  // change afterwards, the installed handler keeps the stale values — verify
  // this cannot misroute 'ice_candidate' messages on a long-lived socket.
  const connectWs = useCallback(() => {
    if (wsRef.current && wsRef.current.readyState === WebSocket.OPEN) return;
    const proto = location.protocol === 'https:' ? 'wss' : 'ws';
    const ws = new WebSocket(`${proto}://${location.host}/ws/streaming`);
    wsRef.current = ws;
    ws.onopen = () => {
      reconnectDelayRef.current = 1000; // successful connect resets the backoff
    };
    ws.onmessage = (ev) => {
      let msg: any;
      try { msg = JSON.parse(ev.data); } catch { return; } // ignore malformed frames
      handleWsMessage(msg);
    };
    ws.onclose = () => {
      wsRef.current = null;
      // Auto-reconnect if broadcasting or viewing
      if (isBroadcasting || viewing) {
        reconnectTimerRef.current = setTimeout(() => {
          reconnectDelayRef.current = Math.min(reconnectDelayRef.current * 2, 10000);
          connectWs();
        }, reconnectDelayRef.current);
      }
    };
    ws.onerror = () => {
      ws.close(); // onclose fires next and decides whether to reconnect
    };
  }, [isBroadcasting, viewing]);
  // ── WS message handler ──
  // Central dispatch for all signaling messages. Broadcaster-side cases
  // manage the per-viewer PC map; viewer-side cases manage the single PC.
  const handleWsMessage = useCallback((msg: any) => {
    switch (msg.type) {
      case 'welcome':
        // First message after connect: our ID plus a stream-list snapshot.
        clientIdRef.current = msg.clientId;
        if (msg.streams) setStreams(msg.streams);
        break;
      case 'broadcast_started':
        // Server acknowledged our start_broadcast request.
        setMyStreamId(msg.streamId);
        setIsBroadcasting(true);
        setStarting(false);
        break;
      case 'stream_available':
        // SSE will update streams list; this is just a hint
        break;
      case 'stream_ended':
        // The stream we were watching went away — tear down the viewer PC.
        if (viewing?.streamId === msg.streamId) {
          cleanupViewer();
          setViewing(null);
        }
        break;
      // ── Broadcaster: viewer joined → create offer ──
      case 'viewer_joined': {
        const viewerId = msg.viewerId;
        const pc = new RTCPeerConnection(RTC_CONFIG);
        peerConnectionsRef.current.set(viewerId, pc);
        // Add local stream tracks
        const stream = localStreamRef.current;
        if (stream) {
          for (const track of stream.getTracks()) {
            pc.addTrack(track, stream);
          }
        }
        // Trickle ICE: forward each local candidate to this viewer.
        pc.onicecandidate = (ev) => {
          if (ev.candidate) {
            wsSend({ type: 'ice_candidate', targetId: viewerId, candidate: ev.candidate.toJSON() });
          }
        };
        pc.createOffer()
          .then(offer => pc.setLocalDescription(offer))
          .then(() => {
            wsSend({ type: 'offer', targetId: viewerId, sdp: pc.localDescription });
          })
          .catch(console.error);
        break;
      }
      // ── Broadcaster: viewer left → cleanup ──
      case 'viewer_left': {
        const pc = peerConnectionsRef.current.get(msg.viewerId);
        if (pc) {
          pc.close();
          peerConnectionsRef.current.delete(msg.viewerId);
        }
        break;
      }
      // ── Viewer: received offer from broadcaster ──
      case 'offer': {
        const pc = new RTCPeerConnection(RTC_CONFIG);
        viewerPcRef.current = pc;
        pc.ontrack = (ev) => {
          // Remote track arrived: attach it and flip the overlay to 'connected'.
          if (remoteVideoRef.current && ev.streams[0]) {
            remoteVideoRef.current.srcObject = ev.streams[0];
          }
          setViewing(prev => prev ? { ...prev, phase: 'connected' } : prev);
        };
        pc.onicecandidate = (ev) => {
          if (ev.candidate) {
            wsSend({ type: 'ice_candidate', targetId: msg.fromId, candidate: ev.candidate.toJSON() });
          }
        };
        pc.oniceconnectionstatechange = () => {
          // NOTE(review): 'disconnected' can be transient and recover on its
          // own; treating it the same as 'failed' may surface spurious
          // errors — confirm this is intended.
          if (pc.iceConnectionState === 'failed' || pc.iceConnectionState === 'disconnected') {
            setViewing(prev => prev ? { ...prev, phase: 'error', error: 'Verbindung verloren' } : prev);
          }
        };
        // Standard answerer flow: setRemote(offer) → createAnswer → setLocal → send answer.
        pc.setRemoteDescription(new RTCSessionDescription(msg.sdp))
          .then(() => pc.createAnswer())
          .then(answer => pc.setLocalDescription(answer))
          .then(() => {
            wsSend({ type: 'answer', targetId: msg.fromId, sdp: pc.localDescription });
          })
          .catch(console.error);
        break;
      }
      // ── Broadcaster: received answer from viewer ──
      case 'answer': {
        const pc = peerConnectionsRef.current.get(msg.fromId);
        if (pc) {
          pc.setRemoteDescription(new RTCSessionDescription(msg.sdp)).catch(console.error);
        }
        break;
      }
      // ── ICE candidate relay ──
      case 'ice_candidate': {
        // Route to the right PC: per-viewer map when broadcasting, the single
        // viewer PC otherwise. addIceCandidate errors are deliberately
        // ignored — late/duplicate candidates after close are harmless.
        const pc = isBroadcasting
          ? peerConnectionsRef.current.get(msg.fromId)
          : viewerPcRef.current;
        if (pc && msg.candidate) {
          pc.addIceCandidate(new RTCIceCandidate(msg.candidate)).catch(() => {});
        }
        break;
      }
      case 'error':
        setError(msg.message);
        setStarting(false);
        break;
    }
  }, [isBroadcasting, viewing]);
  // ── Send via WS ──
  // Best-effort: silently drops the message if the socket is not open.
  const wsSend = (data: Record<string, any>) => {
    if (wsRef.current?.readyState === WebSocket.OPEN) {
      wsRef.current.send(JSON.stringify(data));
    }
  };
  // ── Start broadcasting ──
  // Captures the screen, wires the local preview, then announces the
  // broadcast over the signaling socket once it is open.
  const startBroadcast = useCallback(async () => {
    if (!userName.trim()) { setError('Bitte gib einen Namen ein.'); return; }
    // Check browser support
    if (!navigator.mediaDevices?.getDisplayMedia) {
      setError('Dein Browser unterstützt keine Bildschirmfreigabe.');
      return;
    }
    setError(null);
    setStarting(true);
    try {
      const stream = await navigator.mediaDevices.getDisplayMedia({
        video: true,
        audio: true,
      });
      localStreamRef.current = stream;
      // Show local preview
      if (localVideoRef.current) {
        localVideoRef.current.srcObject = stream;
      }
      // Auto-stop when user clicks native "Stop sharing"
      stream.getVideoTracks()[0]?.addEventListener('ended', () => {
        stopBroadcast();
      });
      // Connect WS and start broadcast
      connectWs();
      // Wait for WS to open, then send start_broadcast
      // NOTE(review): polls every 100ms with no timeout — if the socket can
      // never open, this retries forever; consider a retry cap.
      const waitForWs = () => {
        if (wsRef.current?.readyState === WebSocket.OPEN) {
          wsSend({ type: 'start_broadcast', name: userName.trim(), title: streamTitle.trim() || 'Screen Share' });
        } else {
          setTimeout(waitForWs, 100);
        }
      };
      waitForWs();
    } catch (e: any) {
      setStarting(false);
      if (e.name === 'NotAllowedError') {
        // User dismissed the browser's screen-picker dialog.
        setError('Bildschirmfreigabe wurde abgelehnt.');
      } else {
        setError(`Fehler: ${e.message}`);
      }
    }
  }, [userName, streamTitle, connectWs]);
  // ── Stop broadcasting ──
  // Notifies the server, releases the capture, and closes every viewer PC.
  const stopBroadcast = useCallback(() => {
    wsSend({ type: 'stop_broadcast' });
    // Stop all tracks
    localStreamRef.current?.getTracks().forEach(t => t.stop());
    localStreamRef.current = null;
    if (localVideoRef.current) localVideoRef.current.srcObject = null;
    // Close all peer connections
    for (const pc of peerConnectionsRef.current.values()) pc.close();
    peerConnectionsRef.current.clear();
    setIsBroadcasting(false);
    setMyStreamId(null);
  }, []);
  // ── Join as viewer ──
  // Switches the UI to the fullscreen overlay and asks the server to attach
  // us to the given stream; the broadcaster then sends us an offer.
  const joinStream = useCallback((streamId: string) => {
    setError(null);
    setViewing({ streamId, phase: 'connecting' });
    connectWs();
    // Same open-polling pattern (and caveat) as in startBroadcast above.
    const waitForWs = () => {
      if (wsRef.current?.readyState === WebSocket.OPEN) {
        wsSend({ type: 'join_viewer', name: userName.trim() || 'Viewer', streamId });
      } else {
        setTimeout(waitForWs, 100);
      }
    };
    waitForWs();
  }, [userName, connectWs]);
  // ── Leave viewer ──
  // Closes the viewer-side PC and detaches the remote video element.
  const cleanupViewer = useCallback(() => {
    if (viewerPcRef.current) {
      viewerPcRef.current.close();
      viewerPcRef.current = null;
    }
    if (remoteVideoRef.current) remoteVideoRef.current.srcObject = null;
  }, []);
  // Explicit user exit: additionally tells the server we left.
  const leaveViewing = useCallback(() => {
    wsSend({ type: 'leave_viewer' });
    cleanupViewer();
    setViewing(null);
  }, [cleanupViewer]);
  // ── Cleanup on unmount ──
  // Releases capture tracks, all PCs, the socket, and any pending reconnect.
  useEffect(() => {
    return () => {
      localStreamRef.current?.getTracks().forEach(t => t.stop());
      for (const pc of peerConnectionsRef.current.values()) pc.close();
      if (viewerPcRef.current) viewerPcRef.current.close();
      if (wsRef.current) wsRef.current.close();
      if (reconnectTimerRef.current) clearTimeout(reconnectTimerRef.current);
    };
  }, []);
  // ── Render ──
  // Fullscreen viewer overlay (replaces the whole tab while watching).
  if (viewing) {
    const stream = streams.find(s => s.id === viewing.streamId);
    return (
      <div className="stream-viewer-overlay">
        <div className="stream-viewer-header">
          <div className="stream-viewer-header-left">
            <span className="stream-live-badge"><span className="stream-live-dot" /> LIVE</span>
            <div>
              <div className="stream-viewer-title">{stream?.title || 'Stream'}</div>
              <div className="stream-viewer-subtitle">
                {stream?.broadcasterName || '...'} {stream ? `\u00B7 ${stream.viewerCount} Zuschauer` : ''}
              </div>
            </div>
          </div>
          <button className="stream-viewer-close" onClick={leaveViewing}>Verlassen</button>
        </div>
        <div className="stream-viewer-video">
          {viewing.phase === 'connecting' ? (
            <div className="stream-viewer-connecting">
              <div className="stream-viewer-spinner" />
              Verbindung wird hergestellt...
            </div>
          ) : viewing.phase === 'error' ? (
            <div className="stream-viewer-connecting">
              {viewing.error || 'Verbindungsfehler'}
              <button className="stream-btn" onClick={leaveViewing}>Zur\u00FCck</button>
            </div>
          ) : null}
          {/* Video element stays mounted (hidden) so ontrack can attach early. */}
          <video ref={remoteVideoRef} autoPlay playsInline style={viewing.phase === 'connected' ? {} : { display: 'none' }} />
        </div>
      </div>
    );
  }
  return (
    <div className="stream-container">
      {/* Error */}
      {error && (
        <div className="stream-error">
          {error}
          <button className="stream-error-dismiss" onClick={() => setError(null)}>{'\u00D7'}</button>
        </div>
      )}
      {/* Top bar: name, title, start/stop */}
      <div className="stream-topbar">
        <input
          className="stream-input stream-input-name"
          placeholder="Dein Name"
          value={userName}
          onChange={e => setUserName(e.target.value)}
          disabled={isBroadcasting}
        />
        <input
          className="stream-input stream-input-title"
          placeholder="Stream-Titel"
          value={streamTitle}
          onChange={e => setStreamTitle(e.target.value)}
          disabled={isBroadcasting}
        />
        {isBroadcasting ? (
          <button className="stream-btn stream-btn-stop" onClick={stopBroadcast}>
            {'\u23F9'} Stream beenden
          </button>
        ) : (
          <button className="stream-btn" onClick={startBroadcast} disabled={starting}>
            {starting ? 'Starte...' : '\u{1F5A5}\uFE0F Stream starten'}
          </button>
        )}
      </div>
      {/* Grid */}
      {streams.length === 0 && !isBroadcasting ? (
        <div className="stream-empty">
          <div className="stream-empty-icon">{'\u{1F4FA}'}</div>
          <h3>Keine aktiven Streams</h3>
          <p>Starte einen Stream, um deinen Bildschirm zu teilen.</p>
        </div>
      ) : (
        <div className="stream-grid">
          {/* Own broadcast tile (with local preview) */}
          {isBroadcasting && (
            <div className="stream-tile own broadcasting">
              <div className="stream-tile-preview">
                <video ref={localVideoRef} autoPlay playsInline muted />
                <span className="stream-live-badge"><span className="stream-live-dot" /> LIVE</span>
                <span className="stream-tile-viewers">
                  {'\u{1F465}'} {streams.find(s => s.id === myStreamId)?.viewerCount ?? 0}
                </span>
              </div>
              <div className="stream-tile-info">
                <div className="stream-tile-meta">
                  <div className="stream-tile-name">{userName} (Du)</div>
                  <div className="stream-tile-title">{streamTitle}</div>
                </div>
                <span className="stream-tile-time">
                  {myStreamId && streams.find(s => s.id === myStreamId)?.startedAt
                    ? formatElapsed(streams.find(s => s.id === myStreamId)!.startedAt)
                    : '0:00'}
                </span>
              </div>
            </div>
          )}
          {/* Other streams */}
          {streams
            .filter(s => s.id !== myStreamId)
            .map(s => (
              <div key={s.id} className="stream-tile" onClick={() => joinStream(s.id)}>
                <div className="stream-tile-preview">
                  <span className="stream-tile-icon">{'\u{1F5A5}\uFE0F'}</span>
                  <span className="stream-live-badge"><span className="stream-live-dot" /> LIVE</span>
                  <span className="stream-tile-viewers">{'\u{1F465}'} {s.viewerCount}</span>
                </div>
                <div className="stream-tile-info">
                  <div className="stream-tile-meta">
                    <div className="stream-tile-name">{s.broadcasterName}</div>
                    <div className="stream-tile-title">{s.title}</div>
                  </div>
                  <span className="stream-tile-time">{formatElapsed(s.startedAt)}</span>
                  {/* stopPropagation keeps the menu click from joining the stream. */}
                  <button className="stream-tile-menu" onClick={e => e.stopPropagation()}>{'\u22EE'}</button>
                </div>
              </div>
            ))}
        </div>
      )}
    </div>
  );
}