/* eslint-disable */
/*
 * DemoStage — the streaming demo (canvas + WebSocket + event capture) wrapped
 * as a React component so it can mount/unmount with the play overlay. Most of
 * the inner logic is imperative and lives in refs; React state is used only
 * for what the surrounding chrome needs to re-render (status pill, HUD
 * counters, held set).
 *
 * Lifecycle:
 *   mount   → open WS, send hello, await first frame
 *   ready   → enable Play (parent renders the button)
 *   play    → forward "play" to server, queue arriving frames
 *   after 8 frames of block 0 arrive → enable capture (keydown/keyup)
 *   unmount → close WS, release session lock on the server
 */
const { useState, useEffect, useRef, useCallback } = React;

// seq value the server uses to tag the one-off preview frame; handleBinary
// draws it immediately instead of queueing it.
const FIRST_FRAME_SENTINEL = 0xFFFFFFFF;
const HEADER_SIZE = 24;
// Binary frame header layout (little-endian, decoded in handleBinary):
//   0..7   display_t_ms (float64)
//   8..11  seq          (uint32)
//   12..15 jpeg_len     (uint32)
//   16..23 reserved
// Body: jpeg bytes only.
//
// GC samples arrive on a separate JSON message stream:
//   {"type":"gc_tick","display_t_ms":<ms>,"held":[...]}

// Short random session id sent in the hello handshake to identify this mount.
function makeUid() { return 'u-' + Math.random().toString(36).slice(2, 10); }

const DemoStage = React.forwardRef(function DemoStageInner({ onClose, onState }, ref) {
// External-facing state for chrome
const [status, setStatus] = useState('connecting'); // connecting | ready | starting | streaming | error | closed
const [statusMsg, setStatusMsg] = useState('connecting…');
const [recvCount, setRecvCount] = useState(0);
const [queueLen, setQueueLen] = useState(0);
const [heldDisplay, setHeldDisplay] = useState([]); // live keys held by browser (input)
const [renderedHeld, setRenderedHeld] = useState([]); // 60 Hz GC sample held set, aligned with the currently-displayed frame
const [pingMs, setPingMs] = useState(null); // last measured WS round-trip
const [denoiseMs, setDenoiseMs] = useState(null); // last block's server-reported denoise time
const [decodeMs, setDecodeMs] = useState(null); // last block's server-reported decode time

// Bubble up state changes so PlayOverlay can render the HUD as a sibling
// of the stage frame (not nested inside it).
useEffect(() => {
  if (onState) {
    onState({
      status, statusMsg, recvCount, queueLen, heldDisplay,
      renderedHeld, pingMs, denoiseMs, decodeMs,
    });
  }
}, [onState, status, statusMsg, recvCount, queueLen, heldDisplay, renderedHeld, pingMs, denoiseMs, decodeMs]);

// Refs holding mutable state used inside the render loop / WS callbacks
const canvasRef = useRef(null);
const wsRef = useRef(null);
// Server clock minus performance.now(); set on hello_ack, added back to
// performance.now() in the render loop to compare against display_t_ms.
const clockOffsetRef = useRef(0);
const renderQueueRef = useRef([]); // decoded frames: { display_t_ms, seq, bmp }
// GC samples (60 Hz). Each entry: { t_ms_abs, held }, sorted ascending by t_ms_abs.
// Drained at RAF cadence: latest sample with t_ms_abs <= now becomes the
// displayed held set. This decouples GC visualization from the 15 fps
// frame cadence so the keyboard updates at full sampling rate.
const gcSampleQueueRef = useRef([]);
const firstFrameDrawnRef = useRef(false); // preview frame has reached the canvas
const streamingRef = useRef(false);       // true once capture is enabled
const firstBlockSeenRef = useRef(0);      // frames counted toward the 8-frame capture gate
const heldLocalRef = useRef(new Set());   // key codes currently held (browser input side)
const rafRef = useRef(0);                 // current requestAnimationFrame handle
const captureWiredRef = useRef(false);    // global listeners attached?
const uidRef = useRef(makeUid());
// Most-recent outstanding ping's client_t_ms (we only keep one in flight).
// The pong handler reads this to compute RTT.
const pendingPingRef = useRef(null);

// ---- send helpers -------------------------------------------------------

// Serialize and send a control message; quietly dropped unless the socket
// is currently OPEN (connecting/closing sockets throw on send).
const sendJSON = useCallback((obj) => {
  const socket = wsRef.current;
  if (socket && socket.readyState === WebSocket.OPEN) {
    socket.send(JSON.stringify(obj));
  }
}, []);

// ---- capture wiring -----------------------------------------------------

// Key press: record locally, forward to the server, then block the
// browser's default scroll behavior for the navigation keys.
const onKeyDown = useCallback((e) => {
  const held = heldLocalRef.current;
  if (!streamingRef.current || e.repeat || held.has(e.code)) return;
  held.add(e.code);
  sendJSON({ type: 'event', event: { kind: 'key', code: e.code, state: 'down' } });
  const isNavKey = ['Space', 'ArrowUp', 'ArrowDown', 'ArrowLeft', 'ArrowRight'].includes(e.code);
  if (isNavKey) e.preventDefault();
  setHeldDisplay(Array.from(held).sort());
}, [sendJSON]);

// Key release: mirror of onKeyDown (no repeat filtering needed).
const onKeyUp = useCallback((e) => {
  const held = heldLocalRef.current;
  if (!streamingRef.current || !held.has(e.code)) return;
  held.delete(e.code);
  sendJSON({ type: 'event', event: { kind: 'key', code: e.code, state: 'up' } });
  setHeldDisplay(Array.from(held).sort());
}, [sendJSON]);

// Window lost focus: the real keyups will never arrive, so synthesize one
// per held key and clear the local set.
const onBlurEvent = useCallback(() => {
  if (!streamingRef.current) return;
  heldLocalRef.current.forEach((code) => {
    sendJSON({ type: 'event', event: { kind: 'key', code, state: 'up', synth: true } });
  });
  heldLocalRef.current.clear();
  setHeldDisplay([]);
}, [sendJSON]);

// Attach the global key/blur listeners exactly once per streaming session.
const wireCapture = useCallback(() => {
  if (captureWiredRef.current) return;
  captureWiredRef.current = true;
  window.addEventListener('keydown', onKeyDown);
  window.addEventListener('keyup', onKeyUp);
  window.addEventListener('blur', onBlurEvent);
}, [onKeyDown, onKeyUp, onBlurEvent]);

// Detach the listeners and forget any held keys. Idempotent.
const teardownCapture = useCallback(() => {
  if (!captureWiredRef.current) return;
  captureWiredRef.current = false;
  window.removeEventListener('keydown', onKeyDown);
  window.removeEventListener('keyup', onKeyUp);
  window.removeEventListener('blur', onBlurEvent);
  heldLocalRef.current.clear();
  setHeldDisplay([]);
}, [onKeyDown, onKeyUp, onBlurEvent]);
// ---- WebSocket message handlers ----------------------------------------

// Dispatch one JSON control message from the server. Unparseable text and
// unknown message types are silently ignored.
const handleText = useCallback((text) => {
  let msg;
  try { msg = JSON.parse(text); } catch { return; }
  switch (msg.type) {
    case 'hello_ack': {
      // NOTE(review): offset ignores network latency (no RTT/2 correction);
      // confirm that is acceptable for frame scheduling.
      clockOffsetRef.current = msg.server_now_ms - performance.now();
      setStatusMsg('waiting for first frame…');
      break;
    }
    case 'play_ack': {
      if (msg.ok) { setStatus('starting'); setStatusMsg('starting…'); }
      else { setStatus('error'); setStatusMsg(`play failed: ${msg.err || 'unknown'}`); }
      break;
    }
    case 'reset_ack': {
      // Drop all queued frames/samples and return to the pre-play state.
      // NOTE(review): queued ImageBitmaps are discarded here without
      // bmp.close(), unlike the unmount cleanup below — possible GPU-memory
      // leak on reset while frames are queued.
      renderQueueRef.current.length = 0;
      gcSampleQueueRef.current.length = 0;
      firstBlockSeenRef.current = 0;
      streamingRef.current = false;
      teardownCapture();
      setRecvCount(0);
      setQueueLen(0);
      setRenderedHeld([]);
      setDenoiseMs(null);
      setDecodeMs(null);
      setStatus('ready');
      setStatusMsg('reset — press Play');
      break;
    }
    case 'block_stats': {
      // Per-block server timings for the stats HUD; ignore absent fields.
      if (typeof msg.denoise_ms === 'number') setDenoiseMs(msg.denoise_ms);
      if (typeof msg.decode_ms === 'number') setDecodeMs(msg.decode_ms);
      break;
    }
    case 'pong': {
      const sent = pendingPingRef.current;
      if (sent != null) {
        pendingPingRef.current = null;
        setPingMs(performance.now() - sent);
      }
      break;
    }
    case 'gc_tick': {
      // 60 Hz keyboard sample, decoupled from frames. Insert into the
      // GC queue ordered by absolute display time. The RAF loop drains it.
      const t_ms_abs = msg.display_t_ms;
      const held = Array.isArray(msg.held) ? msg.held : [];
      const q = gcSampleQueueRef.current;
      // Most ticks arrive monotonically, so push and only sort if needed.
      // NOTE(review): both branches push — this collapses to an unconditional
      // push followed by a conditional sort; the duplication is harmless.
      if (q.length === 0 || t_ms_abs >= q[q.length - 1].t_ms_abs) {
        q.push({ t_ms_abs, held });
      } else {
        q.push({ t_ms_abs, held });
        q.sort((a, b) => a.t_ms_abs - b.t_ms_abs);
      }
      break;
    }
    case 'error': {
      setStatus('error');
      setStatusMsg(`error: ${msg.err}`);
      break;
    }
    default:
      // ignore
  }
}, [teardownCapture]);

// Decode one binary frame message (24-byte header + JPEG body; see the
// header-layout comment at the top of the file) and either draw it
// immediately (first-frame sentinel) or queue it for the RAF render loop.
const handleBinary = useCallback((arrbuf) => {
  if (arrbuf.byteLength < HEADER_SIZE) return; // too short to carry a header
  const view = new DataView(arrbuf);
  const display_t_ms = view.getFloat64(0, true);
  const seq = view.getUint32(8, true);
  const jpeg_len = view.getUint32(12, true);
  if (HEADER_SIZE + jpeg_len > arrbuf.byteLength) return; // truncated body
  const jpegBlob = new Blob(
    [new Uint8Array(arrbuf, HEADER_SIZE, jpeg_len)],
    { type: 'image/jpeg' }
  );
  // NOTE(review): no .catch() — a JPEG decode failure becomes an unhandled
  // promise rejection and the frame is silently lost.
  createImageBitmap(jpegBlob).then((bmp) => {
    const canvas = canvasRef.current;
    if (!canvas) { bmp.close(); return; } // unmounted while decoding
    if (seq === FIRST_FRAME_SENTINEL) {
      // Preview frame: draw straight to the canvas, never queued.
      const ctx = canvas.getContext('2d');
      ctx.drawImage(bmp, 0, 0, canvas.width, canvas.height);
      bmp.close();
      firstFrameDrawnRef.current = true;
      setRenderedHeld([]);
      setStatus('ready');
      setStatusMsg('ready — press Play');
      return;
    }
    // Regular frame: keep the queue ordered by seq (decode completion order
    // is not guaranteed to match arrival order).
    renderQueueRef.current.push({ display_t_ms, seq, bmp });
    renderQueueRef.current.sort((a, b) => a.seq - b.seq);
    setRecvCount((n) => n + 1);
    setQueueLen(renderQueueRef.current.length);
    // Capture is gated on the first 8 queued frames so input only starts
    // once the stream is clearly flowing.
    if (!streamingRef.current) {
      firstBlockSeenRef.current += 1;
      if (firstBlockSeenRef.current >= 8) {
        streamingRef.current = true;
        wireCapture();
        setStatus('streaming');
        setStatusMsg('streaming · capture active');
      }
    }
  });
}, [wireCapture]);

// ---- Render loop --------------------------------------------------------
// RAF loop: draws every queued frame whose (server-clock) display time has
// arrived, then advances the keyboard display to the latest due GC sample.
useEffect(() => {
  let lastHeldKey = ''; // dedupe setRenderedHeld calls
  function tick() {
    if (firstFrameDrawnRef.current) {
      // Map local time onto the server clock (offset set on hello_ack).
      const now = performance.now() + clockOffsetRef.current;
      const canvas = canvasRef.current;
      if (canvas) {
        const ctx = canvas.getContext('2d');
        // Multiple due frames in one tick overdraw each other; only the
        // last one remains visible. Bitmaps are closed as they are consumed.
        while (renderQueueRef.current.length && renderQueueRef.current[0].display_t_ms <= now) {
          const f = renderQueueRef.current.shift();
          ctx.drawImage(f.bmp, 0, 0, canvas.width, canvas.height);
          f.bmp.close();
        }
        setQueueLen(renderQueueRef.current.length);
        // Drain GC samples up to now, keeping the most recent. Each sample
        // is a 60 Hz tick; multiple may fire between two displayed frames,
        // and that's the point — the keyboard updates at GC sampling rate
        // independent of the video frame rate.
        let latest = null;
        while (
          gcSampleQueueRef.current.length &&
          gcSampleQueueRef.current[0].t_ms_abs <= now
        ) {
          latest = gcSampleQueueRef.current.shift();
        }
        if (latest) {
          const key = latest.held.join(',');
          if (key !== lastHeldKey) {
            lastHeldKey = key;
            setRenderedHeld(latest.held);
          }
        }
      }
    }
    rafRef.current = requestAnimationFrame(tick);
  }
  rafRef.current = requestAnimationFrame(tick);
  return () => cancelAnimationFrame(rafRef.current);
}, []);

// ---- WebSocket connect / disconnect ------------------------------------
// One socket per mount; cleanup closes it and releases queued bitmaps.
useEffect(() => {
  const wsProto = location.protocol === 'https:' ? 'wss:' : 'ws:';
  const ws = new WebSocket(`${wsProto}//${location.host}/ws`);
  ws.binaryType = 'arraybuffer';
  wsRef.current = ws;
  ws.addEventListener('open', () => {
    sendJSON({ type: 'hello', uid: uidRef.current });
  });
  // Periodic ping so the StatsPanel can show a live RTT. We keep at most
  // one ping in flight; if the previous pong hasn't arrived we just
  // overwrite the timestamp (the older sample is stale anyway).
  const pingInterval = setInterval(() => {
    if (ws.readyState !== WebSocket.OPEN) return;
    pendingPingRef.current = performance.now();
    ws.send(JSON.stringify({ type: 'ping' }));
  }, 1000);
  ws.addEventListener('message', (ev) => {
    // Text frames are JSON control messages; binary frames are video.
    if (typeof ev.data === 'string') handleText(ev.data);
    else handleBinary(ev.data);
  });
  ws.addEventListener('close', (ev) => {
    setStatus('closed');
    setStatusMsg(`disconnected (${ev.code})`);
  });
  ws.addEventListener('error', () => {
    setStatus('error');
    setStatusMsg('websocket error');
  });
  return () => {
    clearInterval(pingInterval);
    teardownCapture();
    try { ws.close(); } catch (_) {}
    wsRef.current = null;
    // Drop any queued bitmaps to free GPU memory.
    for (const f of renderQueueRef.current) {
      try { f.bmp.close(); } catch (_) {}
    }
    renderQueueRef.current.length = 0;
    gcSampleQueueRef.current.length = 0;
  };
  // The handlers depend on stable refs; we want exactly one connect per mount.
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

// ---- Expose actions to parent via ref ----------------------------------
// play/reset forward to the server; canPlay/canReset gate the parent's
// buttons. status/statusMsg are snapshots refreshed whenever they change
// (the handle is rebuilt on every status change via the dep array).
React.useImperativeHandle(ref, () => ({
  play: () => sendJSON({ type: 'play' }),
  reset: () => sendJSON({ type: 'reset' }),
  canPlay: () => firstFrameDrawnRef.current && status !== 'starting' && status !== 'streaming',
  canReset: () => firstFrameDrawnRef.current,
  status,
  statusMsg,
}), [sendJSON, status, statusMsg]);

// NOTE(review): the JSX below appears truncated in this copy of the file —
// the element tags (canvas, status pill, etc.) look stripped and the
// fragment is never closed. Recover the markup from version control.
return ( <>
{statusMsg}
); });
window.DemoStage = DemoStage;

// Debug strip rendered under the stage: live pressed-key list plus
// recv/queue counters.
function DemoHud({ recvCount, queueLen, heldDisplay, streaming }) {
  // The visual keyboard above this strip is the server-aligned state; here
  // we show the live keyboard input as text and a couple of debug counters.
  // NOTE(review): the JSX below is garbled in this copy of the file — element
  // tags look stripped (e.g. the `hud-count` className template literal is
  // cut mid-expression at "recv 0 ?"). Recover the markup from version
  // control; the surviving interpolations are preserved verbatim below.
  return (
pressed keys {heldDisplay.length === 0 ? {streaming ? '—' : 'capture off'} : heldDisplay.join(', ')} recv 0 ? 'is-active' : ''}`>{recvCount} queue {queueLen}
); }
window.DemoHud = DemoHud;