Initial commit: Electron video/audio calling client
This commit is contained in:
commit
9eb33512f4
22 changed files with 8848 additions and 0 deletions
503
src/renderer/src/App.tsx
Normal file
503
src/renderer/src/App.tsx
Normal file
|
|
@ -0,0 +1,503 @@
|
|||
import { useState, useEffect, useRef } from "react";
|
||||
import "./index.css";
|
||||
|
||||
import { Lobby } from "./components/Lobby";
|
||||
import { Stage } from "./components/Stage";
|
||||
import { ControlBar } from "./components/ControlBar";
|
||||
import { DeviceSelector } from "./components/DeviceSelector";
|
||||
import { PeerInfo } from "./types";
|
||||
|
||||
// Source code for an AudioWorkletProcessor, compiled at runtime from a Blob
// URL (see the audio-capture effect below). The processor forwards each
// 128-frame input block to the main thread over its MessagePort; postMessage
// structured-clones the Float32Array, so reuse of the engine's underlying
// buffer after the call is safe. Returning true keeps the processor alive.
const audioWorkletCode = `
class PCMProcessor extends AudioWorkletProcessor {
  process(inputs, outputs, parameters) {
    const input = inputs[0];
    if (input && input.length > 0) {
      const inputChannel = input[0];
      // Post full buffer to main thread (renderer)
      // optimization: we could accumulate here if needed
      this.port.postMessage(inputChannel);
    }
    return true;
  }
}
registerProcessor('pcm-processor', PCMProcessor);
`;
|
||||
|
||||
// Payload describing a successful room join. Returned by the "connect"
// invoke (see handleJoin); the 'connect-success' IPC event carries the
// same shape.
interface JoinedPayload {
  self_id: number;   // this client's user id within the room
  room: string;      // room code that was joined
  peers: PeerInfo[]; // participants already present at join time
}
|
||||
|
||||
// Root renderer component: lobby, call stage, media capture/playback glue.
function App() {
  // Connection state
  const [connected, setConnected] = useState(false);
  const [selfId, setSelfId] = useState<number | null>(null);
  const [peers, setPeers] = useState<PeerInfo[]>([]);
  const [error, setError] = useState("");
  const [logs, setLogs] = useState<string[]>([]);

  // Append a timestamped line to the on-screen debug console, keeping only
  // the most recent 20 entries (slice(-19) + the new line).
  const addLog = (msg: string) => {
    console.log(msg);
    setLogs(prev => [...prev.slice(-19), `[${new Date().toLocaleTimeString()}] ${msg}`]);
  };

  // Media State
  const [audioEnabled, setAudioEnabled] = useState(false);
  const [videoEnabled, setVideoEnabled] = useState(false);
  const [screenEnabled, setScreenEnabled] = useState(false);
  const [selectedAudioDevice, setSelectedAudioDevice] = useState<string>("");

  // Video Handling
  const localVideoRef = useRef<HTMLVideoElement>(null); // local camera preview element
  const canvasRef = useRef<HTMLCanvasElement>(null);    // hidden canvas used to JPEG-encode outgoing frames
  // user_id -> blob URL of that peer's most recently received JPEG frame
  const [peerVideoUrls, setPeerVideoUrls] = useState<{ [key: number]: string }>({});
|
||||
|
||||
// Event Listeners
|
||||
useEffect(() => {
|
||||
// @ts-ignore
|
||||
const removeJoined = window.electron.ipcRenderer.on('connect-success', (_, data: JoinedPayload) => {
|
||||
// We might get this as return from invoke, but also good to have event.
|
||||
});
|
||||
|
||||
// Peer Joined
|
||||
// @ts-ignore
|
||||
const removePeerJoined = window.electron.ipcRenderer.on("peer-joined", (_, data) => {
|
||||
addLog(`PeerJoined: ${JSON.stringify(data)}`);
|
||||
if (data && data.user_id) {
|
||||
setPeers((prev) => {
|
||||
if (prev.find(p => p.user_id === data.user_id)) {
|
||||
console.log("Peer already exists:", data.user_id);
|
||||
return prev;
|
||||
}
|
||||
console.log("Adding peer:", data);
|
||||
return [...prev, { user_id: data.user_id, display_name: data.display_name }];
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Peer Left
|
||||
// @ts-ignore
|
||||
const removePeerLeft = window.electron.ipcRenderer.on("peer-left", (_, data) => {
|
||||
addLog(`PeerLeft: ${JSON.stringify(data)}`);
|
||||
if (data && data.user_id) {
|
||||
setPeers((prev) => prev.filter(p => p.user_id !== data.user_id));
|
||||
setPeerVideoUrls(prev => {
|
||||
const newState = { ...prev };
|
||||
if (newState[data.user_id]) {
|
||||
URL.revokeObjectURL(newState[data.user_id]);
|
||||
delete newState[data.user_id];
|
||||
}
|
||||
return newState;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Video Frame
|
||||
// @ts-ignore
|
||||
const removeVideo = window.electron.ipcRenderer.on("video-frame", (_, payload) => {
|
||||
const { user_id, data } = payload;
|
||||
console.log("Video frame from:", user_id, "size:", data?.length || 0);
|
||||
const uint8Array = new Uint8Array(data);
|
||||
const blob = new Blob([uint8Array], { type: 'image/jpeg' });
|
||||
const url = URL.createObjectURL(blob);
|
||||
|
||||
setPeerVideoUrls(prev => {
|
||||
if (prev[user_id]) URL.revokeObjectURL(prev[user_id]);
|
||||
return { ...prev, [user_id]: url };
|
||||
});
|
||||
});
|
||||
|
||||
// Audio Frame Playback with proper jitter buffer
|
||||
const playbackCtxRef = { current: null as AudioContext | null };
|
||||
const nextPlayTimeRef = { current: 0 };
|
||||
const JITTER_BUFFER_MS = 80; // Buffer 80ms before starting playback
|
||||
const bufferQueue: Float32Array[] = [];
|
||||
let isStarted = false;
|
||||
|
||||
const scheduleBuffer = (float32: Float32Array) => {
|
||||
const ctx = playbackCtxRef.current;
|
||||
if (!ctx) return;
|
||||
|
||||
const buffer = ctx.createBuffer(1, float32.length, 48000);
|
||||
buffer.copyToChannel(float32 as any, 0);
|
||||
const source = ctx.createBufferSource();
|
||||
source.buffer = buffer;
|
||||
source.connect(ctx.destination);
|
||||
|
||||
// Schedule at precise time to avoid gaps
|
||||
const now = ctx.currentTime;
|
||||
if (nextPlayTimeRef.current < now) {
|
||||
// We've fallen behind, reset
|
||||
nextPlayTimeRef.current = now + JITTER_BUFFER_MS / 1000;
|
||||
}
|
||||
source.start(nextPlayTimeRef.current);
|
||||
nextPlayTimeRef.current += buffer.duration;
|
||||
};
|
||||
|
||||
const flushBuffer = () => {
|
||||
while (bufferQueue.length > 0) {
|
||||
scheduleBuffer(bufferQueue.shift()!);
|
||||
}
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
const removeAudio = window.electron.ipcRenderer.on("audio-frame", (_, payload) => {
|
||||
try {
|
||||
const { data } = payload;
|
||||
if (!playbackCtxRef.current) {
|
||||
playbackCtxRef.current = new AudioContext({ sampleRate: 48000 });
|
||||
nextPlayTimeRef.current = playbackCtxRef.current.currentTime + JITTER_BUFFER_MS / 1000;
|
||||
}
|
||||
|
||||
const ctx = playbackCtxRef.current;
|
||||
if (ctx.state === 'suspended') {
|
||||
ctx.resume();
|
||||
}
|
||||
|
||||
// Convert Uint8Array (bytes) to Int16 PCM then to Float32
|
||||
const uint8 = new Uint8Array(data);
|
||||
const int16 = new Int16Array(uint8.buffer, uint8.byteOffset, uint8.length / 2);
|
||||
const float32 = new Float32Array(int16.length);
|
||||
|
||||
for (let i = 0; i < int16.length; i++) {
|
||||
float32[i] = int16[i] / 32768;
|
||||
}
|
||||
|
||||
if (!isStarted) {
|
||||
// Buffer a few packets before starting
|
||||
bufferQueue.push(float32);
|
||||
if (bufferQueue.length >= 3) {
|
||||
isStarted = true;
|
||||
flushBuffer();
|
||||
}
|
||||
} else {
|
||||
scheduleBuffer(float32);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Audio playback error:", e);
|
||||
}
|
||||
});
|
||||
|
||||
return () => {
|
||||
removePeerJoined();
|
||||
removePeerLeft();
|
||||
removeVideo();
|
||||
removeAudio();
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Frame Capture Loop
|
||||
useEffect(() => {
|
||||
let animationFrameId: number;
|
||||
let isActive = true;
|
||||
|
||||
const sendFrame = async () => {
|
||||
if (!isActive) return;
|
||||
|
||||
if (videoEnabled && localVideoRef.current && canvasRef.current) {
|
||||
const video = localVideoRef.current;
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
|
||||
if (ctx && video.readyState === 4) {
|
||||
canvas.width = 320; // Low res for MVP
|
||||
canvas.height = 240;
|
||||
ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
|
||||
|
||||
canvas.toBlob(async (blob) => {
|
||||
if (blob && isActive && videoEnabled && connected) {
|
||||
try {
|
||||
const arrayBuffer = await blob.arrayBuffer();
|
||||
const uint8Array = new Uint8Array(arrayBuffer);
|
||||
// @ts-ignore
|
||||
window.electron.ipcRenderer.send("send-video-frame", { frame: uint8Array });
|
||||
} catch (e) {
|
||||
// Ignore send errors
|
||||
}
|
||||
}
|
||||
}, 'image/jpeg', 0.5);
|
||||
}
|
||||
}
|
||||
setTimeout(() => {
|
||||
if (isActive) animationFrameId = requestAnimationFrame(sendFrame);
|
||||
}, 100);
|
||||
};
|
||||
|
||||
if (videoEnabled) {
|
||||
sendFrame();
|
||||
}
|
||||
|
||||
return () => {
|
||||
isActive = false;
|
||||
cancelAnimationFrame(animationFrameId);
|
||||
};
|
||||
}, [videoEnabled, connected]);
|
||||
|
||||
// Camera Access - re-trigger when connected changes
|
||||
useEffect(() => {
|
||||
if (videoEnabled) {
|
||||
navigator.mediaDevices.getUserMedia({ video: { width: 320, height: 240 }, audio: false })
|
||||
.then(stream => {
|
||||
if (localVideoRef.current) {
|
||||
localVideoRef.current.srcObject = stream;
|
||||
}
|
||||
})
|
||||
.catch(err => {
|
||||
console.error("Error accessing camera:", err);
|
||||
setVideoEnabled(false);
|
||||
setError("Failed to access camera");
|
||||
});
|
||||
} else {
|
||||
if (localVideoRef.current && localVideoRef.current.srcObject) {
|
||||
const stream = localVideoRef.current.srcObject as MediaStream;
|
||||
stream.getTracks().forEach(track => track.stop());
|
||||
localVideoRef.current.srcObject = null;
|
||||
}
|
||||
}
|
||||
}, [videoEnabled, connected]);
|
||||
|
||||
// Audio Capture
// Capture-side handles kept in refs so the effect below can tear them down
// across re-runs without triggering re-renders.
const audioContextRef = useRef<AudioContext | null>(null); // mic-side AudioContext (48 kHz)
const audioStreamRef = useRef<MediaStream | null>(null);   // raw getUserMedia microphone stream
||||
|
||||
// Audio capture pipeline: acquire the mic, convert float samples to 16-bit
// PCM, and stream frames to the main process. Prefers an AudioWorklet and
// falls back to the deprecated ScriptProcessorNode when the worklet cannot
// be created. Re-runs whenever the enable flag, connection, or selected
// device changes; `isCancelled` guards every await boundary so a stale run
// cannot clobber a newer one.
useEffect(() => {
  let isCancelled = false;

  // Tear down any live capture: stop mic tracks, then close the context.
  const cleanup = () => {
    // Stop tracks
    if (audioStreamRef.current) {
      audioStreamRef.current.getTracks().forEach(t => t.stop());
      audioStreamRef.current = null;
    }
    // Close context
    if (audioContextRef.current) {
      if (audioContextRef.current.state !== 'closed') {
        audioContextRef.current.close().catch(e => console.error("Error closing ctx", e));
      }
      audioContextRef.current = null;
    }
  };

  // Not capturing in this state — make sure nothing is left running.
  if (!audioEnabled || !connected) {
    cleanup();
    return;
  }

  addLog(`Starting audio... Device: ${selectedAudioDevice || 'Default'}`);

  async function startAudio() {
    try {
      // Short delay to allow previous cleanup to settle if rapid toggling
      await new Promise(r => setTimeout(r, 100));
      if (isCancelled) return;

      const constraints = {
        audio: {
          // exact: fail rather than silently fall back to another mic
          deviceId: selectedAudioDevice ? { exact: selectedAudioDevice } : undefined,
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true
        },
        video: false
      };

      const stream = await navigator.mediaDevices.getUserMedia(constraints);

      // Cancelled while the permission prompt was up — release immediately.
      if (isCancelled) {
        stream.getTracks().forEach(t => t.stop());
        return;
      }

      addLog(`Mic Gained: ${stream.getAudioTracks()[0].label}`);

      audioStreamRef.current = stream;

      // Create context (if allowed by browser policy - usually requires interaction, which we have via button click)
      const ctx = new AudioContext({ sampleRate: 48000, latencyHint: 'interactive' });
      audioContextRef.current = ctx;

      // Load Worklet
      // Note: creating blob URL every time is fine, browsers handle it.
      const blob = new Blob([audioWorkletCode], { type: 'application/javascript' });
      const workletUrl = URL.createObjectURL(blob);

      let useWorklet = false;
      try {
        await ctx.audioWorklet.addModule(workletUrl);
        useWorklet = true;
      } catch (e) {
        // Non-fatal: we fall back to ScriptProcessor below.
        console.warn("Worklet addModule failed", e);
      }
      URL.revokeObjectURL(workletUrl);

      if (isCancelled) {
        ctx.close();
        return;
      }

      const source = ctx.createMediaStreamSource(stream);

      if (useWorklet) {
        try {
          addLog("Creating AudioWorkletNode...");
          const workletNode = new AudioWorkletNode(ctx, 'pcm-processor');
          workletNode.port.onmessage = (e) => {
            if (!audioEnabled || !connected || isCancelled) return;
            // e.data is the Float32Array posted by PCMProcessor; clamp and
            // quantize to signed 16-bit PCM for the wire.
            const float32 = e.data;
            const pcm = new Int16Array(float32.length);
            for (let i = 0; i < float32.length; i++) {
              pcm[i] = Math.max(-32768, Math.min(32767, Math.floor(float32[i] * 32768)));
            }
            // @ts-ignore
            window.electron.ipcRenderer.send('send-audio-frame', { frame: pcm.buffer });
          };
          source.connect(workletNode);
          // Keeps the node pulled by the graph; the worklet produces no
          // output samples, so this does not play the mic back locally.
          workletNode.connect(ctx.destination);
          addLog("Audio Worklet running");
          return; // Success!
        } catch (e: any) {
          console.error("Worklet Node creation failed", e);
          addLog(`Worklet Node failed: ${e.message}`);
          // Fall through to fallback
        }
      }

      // Fallback to ScriptProcessor (deprecated API; 4096-sample buffers)
      addLog("Falling back to ScriptProcessor...");
      // @ts-ignore
      const scriptNode = ctx.createScriptProcessor(4096, 1, 1);
      // @ts-ignore
      scriptNode.onaudioprocess = (e) => {
        if (!audioEnabled || !connected || isCancelled) return;
        // Same float -> int16 quantization as the worklet path.
        const inputData = e.inputBuffer.getChannelData(0);
        const pcm = new Int16Array(inputData.length);
        for (let i = 0; i < inputData.length; i++) {
          pcm[i] = Math.max(-32768, Math.min(32767, Math.floor(inputData[i] * 32768)));
        }
        // @ts-ignore
        window.electron.ipcRenderer.send('send-audio-frame', { frame: pcm.buffer });
      };
      source.connect(scriptNode);
      // @ts-ignore
      scriptNode.connect(ctx.destination);
      addLog("ScriptProcessor running");

    } catch (err: any) {
      if (isCancelled) return; // Ignore errors if we cancelled
      console.error('Audio capture error:', err);
      if (err.name === 'AbortError' || err.name === 'NotAllowedError') {
        addLog(`Permission/Abort Error: ${err.message}`);
        setError(`Mic blocked/aborted: ${err.message}`);
      } else {
        addLog(`Mic Error: ${err.message}`);
        setError(`Mic Error: ${err.message}`);
      }
      setAudioEnabled(false); // Reset state
    }
  }

  startAudio();

  return () => {
    isCancelled = true;
    cleanup();
  };
}, [audioEnabled, connected, selectedAudioDevice]);
|
||||
|
||||
async function handleJoin(roomCode: string, displayName: string, initialVideo: boolean, initialAudio: boolean) {
|
||||
if (!roomCode || !displayName) return;
|
||||
setVideoEnabled(initialVideo);
|
||||
setAudioEnabled(initialAudio);
|
||||
setError("");
|
||||
try {
|
||||
// @ts-ignore
|
||||
const result = await window.electron.ipcRenderer.invoke("connect", { roomCode, displayName });
|
||||
if (result) {
|
||||
addLog(`Connected: Self=${result.self_id}, Peers=${result.peers.length}`);
|
||||
setSelfId(result.self_id);
|
||||
setPeers(result.peers);
|
||||
setConnected(true);
|
||||
}
|
||||
} catch (e: any) {
|
||||
console.error(e);
|
||||
const errMsg = typeof e === 'string' ? e : JSON.stringify(e);
|
||||
addLog(`Error: ${errMsg}`);
|
||||
setError(errMsg);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleLeave() {
|
||||
setVideoEnabled(false);
|
||||
// @ts-ignore
|
||||
await window.electron.ipcRenderer.invoke("disconnect");
|
||||
setConnected(false);
|
||||
setPeers([]);
|
||||
setSelfId(null);
|
||||
setPeerVideoUrls({});
|
||||
}
|
||||
|
||||
const toggleAudio = () => setAudioEnabled(!audioEnabled);
|
||||
const toggleVideo = () => setVideoEnabled(!videoEnabled);
|
||||
const toggleScreen = () => setScreenEnabled(!screenEnabled);
|
||||
|
||||
// Render: lobby before joining; stage + floating controls once connected.
// The error toast overlays both states.
return (
  <div className="h-screen w-screen bg-[#202124] text-white overflow-hidden font-sans select-none">

    {!connected ? (
      <Lobby onJoin={handleJoin} />
    ) : (
      <div className="relative w-full h-full flex flex-col">
        {/* Main Stage */}
        <div className="flex-1 overflow-hidden p-4">
          <Stage
            selfId={selfId}
            displayName={"You"} // Or pass from state
            peers={peers}
            // @ts-ignore
            peerVideoUrls={peerVideoUrls}
            localVideoRef={localVideoRef}
            videoEnabled={videoEnabled}
          />
        </div>

        {/* Floating control cluster, centered at the bottom */}
        <div className="absolute bottom-6 left-1/2 -translate-x-1/2 z-50 flex items-end gap-2">
          {/* Device Selector on the left or integrated? */}
          {/* Make it float above or left of the mic button */}
          <div className="relative">
            <ControlBar
              onLeave={handleLeave}
              audioEnabled={audioEnabled}
              toggleAudio={toggleAudio}
              videoEnabled={videoEnabled}
              toggleVideo={toggleVideo}
            />
            <div className="absolute top-0 right-[-40px] translate-y-2">
              <DeviceSelector
                onDeviceChange={setSelectedAudioDevice}
                currentDeviceId={selectedAudioDevice}
              />
            </div>
          </div>
        </div>

        {/* Hidden Canvas for capture */}
        <canvas ref={canvasRef} className="hidden" />

        {/* Debug Console */}
        <div className="absolute top-0 left-0 bg-black/80 text-green-400 p-2 text-xs font-mono h-32 w-full overflow-y-auto z-50 pointer-events-none opacity-50 hover:opacity-100">
          {logs.map((log, i) => <div key={i}>{log}</div>)}
        </div>
      </div>
    )}

    {/* Error Toast */}
    {error && (
      <div className="absolute top-4 right-4 bg-red-500 text-white px-4 py-2 rounded shadow-lg animate-bounce z-50">
        {error}
      </div>
    )}
  </div>
);
}

export default App;
|
||||
Loading…
Add table
Add a link
Reference in a new issue