import React, { useState, useRef, useEffect } from 'react'; import { Camera, X, AlertCircle, CheckCircle2 } from 'lucide-react'; import { InteractionEntropy } from '../lib/interactionEntropy'; interface EntropyStats { shannon: number; variance: number; uniqueColors: number; brightnessRange: [number, number]; rgbStats: { r: { mean: number; stddev: number }; g: { mean: number; stddev: number }; b: { mean: number; stddev: number }; }; histogram: number[]; // 10 buckets captureTimeMicros: number; interactionSamples: number; totalBits: number; dataSize: number; } interface CameraEntropyProps { wordCount: 12 | 24; onEntropyGenerated: (mnemonic: string, stats: EntropyStats) => void; onCancel: () => void; interactionEntropy: InteractionEntropy; } const CameraEntropy: React.FC = ({ wordCount, onEntropyGenerated, onCancel, interactionEntropy }) => { const [step, setStep] = useState<'permission' | 'capture' | 'processing' | 'stats'>('permission'); const [stream, setStream] = useState(null); const [entropy, setEntropy] = useState(0); const [variance, setVariance] = useState(0); const [captureEnabled, setCaptureEnabled] = useState(false); const [stats, setStats] = useState(null); const [generatedMnemonic, setGeneratedMnemonic] = useState(''); const [error, setError] = useState(''); const videoRef = useRef(null); const canvasRef = useRef(null); const animationRef = useRef(); const requestCameraAccess = async () => { try { console.log('ðŸŽĨ Requesting camera access...'); const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false }); console.log('✅ Camera stream obtained:', { tracks: mediaStream.getVideoTracks().map(t => ({ label: t.label, enabled: t.enabled, readyState: t.readyState, settings: t.getSettings() })) }); setStream(mediaStream); setStep('capture'); // Don't set up video here - let useEffect handle it after render } catch (err: any) { console.error('❌ Camera access error:', err.name, err.message, err); setError(`Camera unavailable: 
${err.message}`); setTimeout(() => onCancel(), 2000); } }; // Set up video element when stream is available useEffect(() => { if (!stream || !videoRef.current) return; const video = videoRef.current; console.log('ðŸ“đ Setting up video element with stream...'); video.srcObject = stream; video.setAttribute('playsinline', ''); video.setAttribute('autoplay', ''); video.muted = true; const handleLoadedMetadata = () => { console.log('✅ Video metadata loaded:', { videoWidth: video.videoWidth, videoHeight: video.videoHeight, readyState: video.readyState }); video.play() .then(() => { console.log('✅ Video playing:', { paused: video.paused, currentTime: video.currentTime }); // Wait for actual frame data setTimeout(() => { // Test if video is actually rendering const testCanvas = document.createElement('canvas'); testCanvas.width = video.videoWidth; testCanvas.height = video.videoHeight; const testCtx = testCanvas.getContext('2d'); if (testCtx && video.videoWidth > 0 && video.videoHeight > 0) { testCtx.drawImage(video, 0, 0); const imageData = testCtx.getImageData(0, 0, Math.min(10, video.videoWidth), Math.min(10, video.videoHeight)); const pixels = Array.from(imageData.data.slice(0, 40)); console.log('ðŸŽĻ First 40 pixel values:', pixels); const allZero = pixels.every(p => p === 0); const allSame = pixels.every(p => p === pixels[0]); if (allZero) { console.error('❌ All pixels are zero - video not rendering!'); } else if (allSame) { console.warn('⚠ïļ All pixels same value - possible issue'); } else { console.log('✅ Video has actual frame data'); } } startEntropyAnalysis(); }, 300); }) .catch(err => { console.error('❌ video.play() failed:', err); setError('Failed to start video preview: ' + err.message); }); }; const handleVideoError = (err: any) => { console.error('❌ Video element error:', err); setError('Video playback error'); }; video.addEventListener('loadedmetadata', handleLoadedMetadata); video.addEventListener('error', handleVideoError); return () => { 
video.removeEventListener('loadedmetadata', handleLoadedMetadata); video.removeEventListener('error', handleVideoError); }; }, [stream]); // Run when stream changes const startEntropyAnalysis = () => { console.log('🔍 Starting entropy analysis...'); const analyze = () => { const video = videoRef.current; const canvas = canvasRef.current; if (!video || !canvas) { // If we are in processing/stats step, don't warn, just stop // This prevents race conditions during capture return; } // Critical: Wait for valid dimensions if (video.videoWidth === 0 || video.videoHeight === 0) { console.warn('⚠ïļ Video dimensions are 0, waiting...', { videoWidth: video.videoWidth, videoHeight: video.videoHeight, readyState: video.readyState }); animationRef.current = requestAnimationFrame(analyze); return; } const ctx = canvas.getContext('2d', { willReadFrequently: true }); if (!ctx) { console.error('❌ Failed to get canvas context'); return; } // Set canvas size to match video if (canvas.width !== video.videoWidth || canvas.height !== video.videoHeight) { canvas.width = video.videoWidth; canvas.height = video.videoHeight; console.log('📐 Canvas resized to:', canvas.width, 'x', canvas.height); } try { ctx.drawImage(video, 0, 0); const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height); // Check if we got actual data if (imageData.data.length === 0) { console.error('❌ ImageData is empty'); animationRef.current = requestAnimationFrame(analyze); return; } const { entropy: e, variance: v } = calculateQuickEntropy(imageData); setEntropy(e); setVariance(v); setCaptureEnabled(e >= 7.5 && v >= 1000); } catch (err) { console.error('❌ Error in entropy analysis:', err); } animationRef.current = requestAnimationFrame(analyze); }; analyze(); }; const calculateQuickEntropy = (imageData: ImageData): { entropy: number; variance: number } => { const data = imageData.data; const histogram = new Array(256).fill(0); let sum = 0; let count = 0; // Sample every 16th pixel for performance for (let i 
= 0; i < data.length; i += 16) { const gray = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3); histogram[gray]++; sum += gray; count++; } const mean = sum / count; // Shannon entropy let entropy = 0; for (const h_count of histogram) { if (h_count > 0) { const p = h_count / count; entropy -= p * Math.log2(p); } } // Variance let variance = 0; for (let i = 0; i < data.length; i += 16) { const gray = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3); variance += Math.pow(gray - mean, 2); } variance = variance / count; return { entropy, variance }; }; const captureEntropy = async () => { if (!videoRef.current || !canvasRef.current) return; // CRITICAL: Stop the analysis loop immediately if (animationRef.current) { cancelAnimationFrame(animationRef.current); console.log('🛑 Stopped entropy analysis loop'); } setStep('processing'); const canvas = canvasRef.current; const ctx = canvas.getContext('2d', { willReadFrequently: true }); if (!ctx) return; canvas.width = videoRef.current.videoWidth; canvas.height = videoRef.current.videoHeight; ctx.drawImage(videoRef.current, 0, 0, canvas.width, canvas.height); const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height); const captureTime = performance.now(); // Full entropy analysis const fullStats = await calculateFullEntropy(imageData, captureTime); // Generate mnemonic from entropy const mnemonic = await generateMnemonicFromEntropy(fullStats, wordCount, canvas); setStats(fullStats); setStep('stats'); // Stop camera if (stream) { stream.getTracks().forEach(track => track.stop()); console.log('📷 Camera stopped'); } // Don't call onEntropyGenerated yet - let user review stats first setGeneratedMnemonic(mnemonic); }; const calculateFullEntropy = async ( imageData: ImageData, captureTime: number ): Promise => { const data = imageData.data; const pixels = data.length / 4; const r: number[] = [], g: number[] = [], b: number[] = []; const histogram = new Array(10).fill(0); const colorSet = new Set(); let minBright 
= 255, maxBright = 0; const allGray: number[] = []; for (let i = 0; i < data.length; i += 4) { r.push(data[i]); g.push(data[i + 1]); b.push(data[i + 2]); const brightness = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3); allGray.push(brightness); const bucket = Math.floor(brightness / 25.6); histogram[Math.min(bucket, 9)]++; minBright = Math.min(minBright, brightness); maxBright = Math.max(maxBright, brightness); const color = (data[i] << 16) | (data[i + 1] << 8) | data[i + 2]; colorSet.add(color); } const grayHistogram = new Array(256).fill(0); for (const gray of allGray) { grayHistogram[gray]++; } let shannon = 0; for (const count of grayHistogram) { if (count > 0) { const p = count / pixels; shannon -= p * Math.log2(p); } } const calcStats = (arr: number[]): { mean: number; stddev: number } => { const mean = arr.reduce((a, b) => a + b, 0) / arr.length; const variance = arr.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / arr.length; return { mean, stddev: Math.sqrt(variance) }; }; const rgbStats = { r: calcStats(r), g: calcStats(g), b: calcStats(b) }; const variance = calcStats(allGray).stddev ** 2; return { shannon, variance, uniqueColors: colorSet.size, brightnessRange: [minBright, maxBright], rgbStats, histogram, captureTimeMicros: Math.floor((captureTime % 1) * 1000000), interactionSamples: interactionEntropy.getSampleCount().total, totalBits: 256, dataSize: data.length }; }; const generateMnemonicFromEntropy = async ( stats: EntropyStats, wordCount: 12 | 24, canvas: HTMLCanvasElement ): Promise => { // Mix multiple entropy sources const imageDataUrl = canvas.toDataURL(); // Now canvas is guaranteed not null const interactionBytes = await interactionEntropy.getEntropyBytes(); const cryptoBytes = crypto.getRandomValues(new Uint8Array(32)); const combined = [ imageDataUrl, stats.captureTimeMicros.toString(), Array.from(interactionBytes).join(','), Array.from(cryptoBytes).join(','), performance.now().toString() ].join('|'); const encoder = new 
TextEncoder(); const data = encoder.encode(combined); const hash = await crypto.subtle.digest('SHA-256', data); // Use bip39 to generate mnemonic from the collected entropy hash const { entropyToMnemonic } = await import('bip39'); const entropyLength = wordCount === 12 ? 16 : 32; const finalEntropy = new Uint8Array(hash).slice(0, entropyLength); // The bip39 library expects a hex string or a Buffer. const entropyHex = Buffer.from(finalEntropy).toString('hex'); return entropyToMnemonic(entropyHex); }; useEffect(() => { return () => { // Cleanup on unmount if (animationRef.current) { cancelAnimationFrame(animationRef.current); } if (stream) { stream.getTracks().forEach(track => track.stop()); } }; }, [stream]); const getStatusMessage = () => { if (entropy >= 7.0 && variance >= 800) { return { icon: CheckCircle2, text: '✅ Excellent entropy - ready!', color: '#39ff14' }; } else if (entropy >= 6.0 && variance >= 500) { return { icon: AlertCircle, text: 'ðŸŸĄ Good - point to brighter area', color: '#ffd700' }; } else if (entropy >= 5.0) { return { icon: AlertCircle, text: '🟠 Low - find textured surface', color: '#ff9500' }; } else { return { icon: AlertCircle, text: 'ðŸ”ī Too low - point at lamp/pattern', color: '#ff006e' }; } }; return (
{step === 'permission' && (

Camera Permission Needed

To generate entropy, we need:

  • Camera access to capture pixel noise
  • Image data processed locally
  • Never stored or transmitted
  • Camera auto-closes after use
)} {step === 'capture' && (

Instructions:

Point camera at bright, textured surface (lamp, carpet, wall with pattern)

Entropy Quality: {entropy.toFixed(2)}/8.0
{getStatusMessage().text}
)} {step === 'processing' && (

Processing entropy...

)} {step === 'stats' && stats && (

Entropy Analysis

Primary Source:

Camera Sensor Noise

RANDOMNESS METRICS:

Shannon Entropy:
{stats.shannon.toFixed(2)}/8.00
Pixel Variance:
{stats.variance.toFixed(1)}
Unique Colors:
{stats.uniqueColors.toLocaleString()}
Brightness Range:
{stats.brightnessRange[0]}-{stats.brightnessRange[1]}

RGB DISTRIBUTION:

Red: μ={stats.rgbStats.r.mean.toFixed(0)} σ={stats.rgbStats.r.stddev.toFixed(1)}
Green: μ={stats.rgbStats.g.mean.toFixed(0)} σ={stats.rgbStats.g.stddev.toFixed(1)}
Blue: μ={stats.rgbStats.b.mean.toFixed(0)} σ={stats.rgbStats.b.stddev.toFixed(1)}

BRIGHTNESS HISTOGRAM:

{stats.histogram.map((val, i) => { const max = Math.max(...stats.histogram); const height = (val / max) * 100; return (
); })}
DarkBright

TIMING ENTROPY:

Capture timing:
...{stats.captureTimeMicros}μs
Interaction samples:
{stats.interactionSamples}

MIXED WITH:

- crypto.getRandomValues() ✓
- performance.now() ✓
- Mouse/keyboard timing ✓
Total Entropy: {stats.totalBits} bits

HOW SEED IS GENERATED:

1. Camera captures {stats.uniqueColors.toLocaleString()} unique pixel colors
2. Pixel data hashed with SHA-256 ({(stats.dataSize / 1024).toFixed(1)}KB raw data)
3. Mixed with timing entropy ({stats.captureTimeMicros}μs precision)
4. Combined with {stats.interactionSamples} user interaction samples
5. Enhanced with crypto.getRandomValues() (32 bytes)
6. Final hash → {wordCount === 12 ? '128' : '256'} bits → {wordCount} BIP39 words

GENERATED SEED:

{generatedMnemonic}

⚠ïļ Hover to reveal - Write this down securely

)} {error && (

{error}

)}
); }; export default CameraEntropy;