mirror of
https://github.com/kccleoc/seedpgp-web.git
synced 2026-03-07 09:57:50 +08:00
- Fix videoRef timing issue by using useEffect for video setup - Stop animation loop on capture to prevent infinite warnings - Fix null canvas reference in generateMnemonicFromEntropy - Add stats review panel with continue/retake options - Add seed generation explanation and blurred preview - Implement seed generation from camera noise/entropy bits and enhance dice rolls with detailed statistical analysis
609 lines
28 KiB
TypeScript
609 lines
28 KiB
TypeScript
import React, { useState, useRef, useEffect } from 'react';
|
||
import { Camera, X, AlertCircle, CheckCircle2 } from 'lucide-react';
|
||
import { InteractionEntropy } from '../lib/interactionEntropy';
|
||
|
||
// Statistical summary of a captured camera frame. Used both for the
// on-screen entropy report and as contextual input to seed generation.
interface EntropyStats {
  shannon: number;                   // Shannon entropy of the grayscale histogram, 0–8 bits
  variance: number;                  // population variance of grayscale pixel values
  uniqueColors: number;              // count of distinct 24-bit RGB colors in the frame
  brightnessRange: [number, number]; // [min, max] grayscale brightness observed
  rgbStats: {
    // per-channel mean and population standard deviation
    r: { mean: number; stddev: number };
    g: { mean: number; stddev: number };
    b: { mean: number; stddev: number };
  };
  histogram: number[]; // 10 buckets
  captureTimeMicros: number;         // sub-millisecond fraction of the capture timestamp, in microseconds
  interactionSamples: number;        // mouse/keyboard entropy samples collected at capture time
  totalBits: number;                 // nominal entropy of the final seed (set to 256 in calculateFullEntropy)
  dataSize: number;                  // raw RGBA byte length of the captured frame
}
|
||
|
||
// Props for the CameraEntropy component.
interface CameraEntropyProps {
  wordCount: 12 | 24;                                               // BIP39 mnemonic length to generate
  onEntropyGenerated: (mnemonic: string, stats: EntropyStats) => void; // invoked only after the user reviews stats and confirms
  onCancel: () => void;                                             // invoked on user cancel or camera failure
  interactionEntropy: InteractionEntropy;                           // collector of user-interaction timing entropy, mixed into the seed
}
|
||
|
||
const CameraEntropy: React.FC<CameraEntropyProps> = ({
  wordCount,
  onEntropyGenerated,
  onCancel,
  interactionEntropy
}) => {
  // UI flow: permission prompt -> live capture preview -> processing -> stats review
  const [step, setStep] = useState<'permission' | 'capture' | 'processing' | 'stats'>('permission');
  const [stream, setStream] = useState<MediaStream | null>(null);
  // Live quick-entropy readings, refreshed every animation frame while capturing
  const [entropy, setEntropy] = useState(0);
  const [variance, setVariance] = useState(0);
  // Capture button is enabled only once the frame passes quality thresholds
  const [captureEnabled, setCaptureEnabled] = useState(false);
  // Full analysis results and the derived seed, held until the user confirms
  const [stats, setStats] = useState<EntropyStats | null>(null);
  const [generatedMnemonic, setGeneratedMnemonic] = useState<string>('');
  const [error, setError] = useState('');

  const videoRef = useRef<HTMLVideoElement>(null);
  const canvasRef = useRef<HTMLCanvasElement>(null); // hidden canvas used to sample video frames
  const animationRef = useRef<number>();             // requestAnimationFrame id of the analysis loop
|
||
|
||
const requestCameraAccess = async () => {
|
||
try {
|
||
console.log('🎥 Requesting camera access...');
|
||
|
||
const mediaStream = await navigator.mediaDevices.getUserMedia({
|
||
video: true,
|
||
audio: false
|
||
});
|
||
|
||
console.log('✅ Camera stream obtained:', {
|
||
tracks: mediaStream.getVideoTracks().map(t => ({
|
||
label: t.label,
|
||
enabled: t.enabled,
|
||
readyState: t.readyState,
|
||
settings: t.getSettings()
|
||
}))
|
||
});
|
||
|
||
setStream(mediaStream);
|
||
setStep('capture');
|
||
|
||
// Don't set up video here - let useEffect handle it after render
|
||
|
||
} catch (err: any) {
|
||
console.error('❌ Camera access error:', err.name, err.message, err);
|
||
setError(`Camera unavailable: ${err.message}`);
|
||
setTimeout(() => onCancel(), 2000);
|
||
}
|
||
};
|
||
|
||
  // Set up video element when stream is available
  useEffect(() => {
    if (!stream || !videoRef.current) return;

    const video = videoRef.current;

    console.log('📹 Setting up video element with stream...');

    video.srcObject = stream;
    // playsinline + autoplay attributes are needed for inline autoplay on
    // mobile Safari; muted avoids autoplay-policy blocks
    video.setAttribute('playsinline', '');
    video.setAttribute('autoplay', '');
    video.muted = true;

    // Once dimensions are known, start playback, sanity-check that real
    // frame data is arriving, then kick off the analysis loop
    const handleLoadedMetadata = () => {
      console.log('✅ Video metadata loaded:', {
        videoWidth: video.videoWidth,
        videoHeight: video.videoHeight,
        readyState: video.readyState
      });

      video.play()
        .then(() => {
          console.log('✅ Video playing:', {
            paused: video.paused,
            currentTime: video.currentTime
          });

          // Wait for actual frame data
          setTimeout(() => {
            // Diagnostic only: sample a few pixels from a throwaway canvas
            // to check the video is actually rendering. The result is only
            // logged — analysis starts either way.
            const testCanvas = document.createElement('canvas');
            testCanvas.width = video.videoWidth;
            testCanvas.height = video.videoHeight;
            const testCtx = testCanvas.getContext('2d');

            if (testCtx && video.videoWidth > 0 && video.videoHeight > 0) {
              testCtx.drawImage(video, 0, 0);
              const imageData = testCtx.getImageData(0, 0, Math.min(10, video.videoWidth), Math.min(10, video.videoHeight));
              const pixels = Array.from(imageData.data.slice(0, 40));
              console.log('🎨 First 40 pixel values:', pixels);

              const allZero = pixels.every(p => p === 0);
              const allSame = pixels.every(p => p === pixels[0]);

              if (allZero) {
                console.error('❌ All pixels are zero - video not rendering!');
              } else if (allSame) {
                console.warn('⚠️ All pixels same value - possible issue');
              } else {
                console.log('✅ Video has actual frame data');
              }
            }

            startEntropyAnalysis();
          }, 300); // small delay so the first real frame has been composited
        })
        .catch(err => {
          console.error('❌ video.play() failed:', err);
          setError('Failed to start video preview: ' + err.message);
        });
    };

    const handleVideoError = (err: any) => {
      console.error('❌ Video element error:', err);
      setError('Video playback error');
    };

    video.addEventListener('loadedmetadata', handleLoadedMetadata);
    video.addEventListener('error', handleVideoError);

    // Remove listeners when the stream changes or the component unmounts
    return () => {
      video.removeEventListener('loadedmetadata', handleLoadedMetadata);
      video.removeEventListener('error', handleVideoError);
    };
  }, [stream]); // Run when stream changes
|
||
|
||
  // Run a requestAnimationFrame loop that samples the current video frame
  // and updates the live entropy/variance readouts. The loop keeps
  // rescheduling itself until captureEntropy() cancels it (or the refs
  // disappear on teardown).
  const startEntropyAnalysis = () => {
    console.log('🔍 Starting entropy analysis...');

    const analyze = () => {
      const video = videoRef.current;
      const canvas = canvasRef.current;

      if (!video || !canvas) {
        // If we are in processing/stats step, don't warn, just stop
        // This prevents race conditions during capture
        return;
      }

      // Critical: Wait for valid dimensions
      if (video.videoWidth === 0 || video.videoHeight === 0) {
        console.warn('⚠️ Video dimensions are 0, waiting...', {
          videoWidth: video.videoWidth,
          videoHeight: video.videoHeight,
          readyState: video.readyState
        });
        animationRef.current = requestAnimationFrame(analyze);
        return;
      }

      // willReadFrequently hints the browser to keep the canvas CPU-backed,
      // speeding up the per-frame getImageData below
      const ctx = canvas.getContext('2d', { willReadFrequently: true });
      if (!ctx) {
        console.error('❌ Failed to get canvas context');
        return;
      }

      // Set canvas size to match video
      if (canvas.width !== video.videoWidth || canvas.height !== video.videoHeight) {
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        console.log('📐 Canvas resized to:', canvas.width, 'x', canvas.height);
      }

      try {
        ctx.drawImage(video, 0, 0);
        const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);

        // Check if we got actual data
        if (imageData.data.length === 0) {
          console.error('❌ ImageData is empty');
          animationRef.current = requestAnimationFrame(analyze);
          return;
        }

        const { entropy: e, variance: v } = calculateQuickEntropy(imageData);

        setEntropy(e);
        setVariance(v);
        // Enable the capture button only above these quality thresholds
        setCaptureEnabled(e >= 7.5 && v >= 1000);

      } catch (err) {
        console.error('❌ Error in entropy analysis:', err);
      }

      animationRef.current = requestAnimationFrame(analyze);
    };

    analyze();
  };
|
||
|
||
const calculateQuickEntropy = (imageData: ImageData): { entropy: number; variance: number } => {
|
||
const data = imageData.data;
|
||
const histogram = new Array(256).fill(0);
|
||
let sum = 0;
|
||
let count = 0;
|
||
|
||
// Sample every 16th pixel for performance
|
||
for (let i = 0; i < data.length; i += 16) {
|
||
const gray = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3);
|
||
histogram[gray]++;
|
||
sum += gray;
|
||
count++;
|
||
}
|
||
|
||
const mean = sum / count;
|
||
|
||
// Shannon entropy
|
||
let entropy = 0;
|
||
for (const h_count of histogram) {
|
||
if (h_count > 0) {
|
||
const p = h_count / count;
|
||
entropy -= p * Math.log2(p);
|
||
}
|
||
}
|
||
|
||
// Variance
|
||
let variance = 0;
|
||
for (let i = 0; i < data.length; i += 16) {
|
||
const gray = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3);
|
||
variance += Math.pow(gray - mean, 2);
|
||
}
|
||
variance = variance / count;
|
||
|
||
return { entropy, variance };
|
||
};
|
||
|
||
const captureEntropy = async () => {
|
||
if (!videoRef.current || !canvasRef.current) return;
|
||
|
||
// CRITICAL: Stop the analysis loop immediately
|
||
if (animationRef.current) {
|
||
cancelAnimationFrame(animationRef.current);
|
||
console.log('🛑 Stopped entropy analysis loop');
|
||
}
|
||
|
||
setStep('processing');
|
||
|
||
const canvas = canvasRef.current;
|
||
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||
if (!ctx) return;
|
||
|
||
canvas.width = videoRef.current.videoWidth;
|
||
canvas.height = videoRef.current.videoHeight;
|
||
ctx.drawImage(videoRef.current, 0, 0, canvas.width, canvas.height);
|
||
|
||
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
|
||
const captureTime = performance.now();
|
||
|
||
// Full entropy analysis
|
||
const fullStats = await calculateFullEntropy(imageData, captureTime);
|
||
|
||
// Generate mnemonic from entropy
|
||
const mnemonic = await generateMnemonicFromEntropy(fullStats, wordCount, canvas);
|
||
|
||
setStats(fullStats);
|
||
setStep('stats');
|
||
|
||
// Stop camera
|
||
if (stream) {
|
||
stream.getTracks().forEach(track => track.stop());
|
||
console.log('📷 Camera stopped');
|
||
}
|
||
|
||
// Don't call onEntropyGenerated yet - let user review stats first
|
||
setGeneratedMnemonic(mnemonic);
|
||
};
|
||
|
||
const calculateFullEntropy = async (
|
||
imageData: ImageData,
|
||
captureTime: number
|
||
): Promise<EntropyStats> => {
|
||
const data = imageData.data;
|
||
const pixels = data.length / 4;
|
||
|
||
const r: number[] = [], g: number[] = [], b: number[] = [];
|
||
const histogram = new Array(10).fill(0);
|
||
const colorSet = new Set<number>();
|
||
let minBright = 255, maxBright = 0;
|
||
const allGray: number[] = [];
|
||
|
||
for (let i = 0; i < data.length; i += 4) {
|
||
r.push(data[i]);
|
||
g.push(data[i + 1]);
|
||
b.push(data[i + 2]);
|
||
|
||
const brightness = Math.floor((data[i] + data[i + 1] + data[i + 2]) / 3);
|
||
allGray.push(brightness);
|
||
const bucket = Math.floor(brightness / 25.6);
|
||
histogram[Math.min(bucket, 9)]++;
|
||
|
||
minBright = Math.min(minBright, brightness);
|
||
maxBright = Math.max(maxBright, brightness);
|
||
|
||
const color = (data[i] << 16) | (data[i + 1] << 8) | data[i + 2];
|
||
colorSet.add(color);
|
||
}
|
||
|
||
const grayHistogram = new Array(256).fill(0);
|
||
for (const gray of allGray) {
|
||
grayHistogram[gray]++;
|
||
}
|
||
|
||
let shannon = 0;
|
||
for (const count of grayHistogram) {
|
||
if (count > 0) {
|
||
const p = count / pixels;
|
||
shannon -= p * Math.log2(p);
|
||
}
|
||
}
|
||
|
||
const calcStats = (arr: number[]): { mean: number; stddev: number } => {
|
||
const mean = arr.reduce((a, b) => a + b, 0) / arr.length;
|
||
const variance = arr.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / arr.length;
|
||
return { mean, stddev: Math.sqrt(variance) };
|
||
};
|
||
|
||
const rgbStats = { r: calcStats(r), g: calcStats(g), b: calcStats(b) };
|
||
const variance = calcStats(allGray).stddev ** 2;
|
||
|
||
return {
|
||
shannon,
|
||
variance,
|
||
uniqueColors: colorSet.size,
|
||
brightnessRange: [minBright, maxBright],
|
||
rgbStats,
|
||
histogram,
|
||
captureTimeMicros: Math.floor((captureTime % 1) * 1000000),
|
||
interactionSamples: interactionEntropy.getSampleCount().total,
|
||
totalBits: 256,
|
||
dataSize: data.length
|
||
};
|
||
};
|
||
|
||
const generateMnemonicFromEntropy = async (
|
||
stats: EntropyStats,
|
||
wordCount: 12 | 24,
|
||
canvas: HTMLCanvasElement
|
||
): Promise<string> => {
|
||
// Mix multiple entropy sources
|
||
const imageDataUrl = canvas.toDataURL(); // Now canvas is guaranteed not null
|
||
|
||
const interactionBytes = await interactionEntropy.getEntropyBytes();
|
||
const cryptoBytes = crypto.getRandomValues(new Uint8Array(32));
|
||
|
||
const combined = [
|
||
imageDataUrl,
|
||
stats.captureTimeMicros.toString(),
|
||
Array.from(interactionBytes).join(','),
|
||
Array.from(cryptoBytes).join(','),
|
||
performance.now().toString()
|
||
].join('|');
|
||
|
||
const encoder = new TextEncoder();
|
||
const data = encoder.encode(combined);
|
||
const hash = await crypto.subtle.digest('SHA-256', data);
|
||
|
||
// Use bip39 to generate mnemonic from the collected entropy hash
|
||
const { entropyToMnemonic } = await import('bip39');
|
||
const entropyLength = wordCount === 12 ? 16 : 32;
|
||
const finalEntropy = new Uint8Array(hash).slice(0, entropyLength);
|
||
|
||
// The bip39 library expects a hex string or a Buffer.
|
||
const entropyHex = Buffer.from(finalEntropy).toString('hex');
|
||
|
||
return entropyToMnemonic(entropyHex);
|
||
};
|
||
|
||
useEffect(() => {
|
||
return () => {
|
||
// Cleanup on unmount
|
||
if (animationRef.current) {
|
||
cancelAnimationFrame(animationRef.current);
|
||
}
|
||
if (stream) {
|
||
stream.getTracks().forEach(track => track.stop());
|
||
}
|
||
};
|
||
}, [stream]);
|
||
|
||
const getStatusMessage = () => {
|
||
if (entropy >= 7.0 && variance >= 800) {
|
||
return { icon: CheckCircle2, text: '✅ Excellent entropy - ready!', color: '#39ff14' };
|
||
} else if (entropy >= 6.0 && variance >= 500) {
|
||
return { icon: AlertCircle, text: '🟡 Good - point to brighter area', color: '#ffd700' };
|
||
} else if (entropy >= 5.0) {
|
||
return { icon: AlertCircle, text: '🟠 Low - find textured surface', color: '#ff9500' };
|
||
} else {
|
||
return { icon: AlertCircle, text: '🔴 Too low - point at lamp/pattern', color: '#ff006e' };
|
||
}
|
||
};
|
||
|
||
  return (
    <div className="space-y-4">
      {/* Step 1: explain why camera access is needed and ask permission */}
      {step === 'permission' && (
        <div className="p-6 bg-[#16213e] rounded-xl border-2 border-[#00f0ff]/30 space-y-4">
          <div className="text-center space-y-2">
            <Camera size={48} className="mx-auto text-[#00f0ff]" />
            <h3 className="text-sm font-bold text-[#00f0ff] uppercase">Camera Permission Needed</h3>
          </div>
          <div className="space-y-2 text-xs text-[#6ef3f7]">
            <p>To generate entropy, we need:</p>
            <ul className="list-disc list-inside space-y-1 pl-2">
              <li>Camera access to capture pixel noise</li>
              <li>Image data processed locally</li>
              <li>Never stored or transmitted</li>
              <li>Camera auto-closes after use</li>
            </ul>
          </div>
          <div className="flex gap-3">
            <button onClick={requestCameraAccess} className="flex-1 py-2.5 bg-[#00f0ff] text-[#0a0a0f] rounded-lg font-bold text-sm hover:bg-[#00f0ff] hover:shadow-[0_0_20px_rgba(0,240,255,0.5)] transition-all">Allow Camera</button>
            <button onClick={onCancel} className="flex-1 py-2.5 bg-[#16213e] border-2 border-[#ff006e] text-[#ff006e] rounded-lg font-bold text-sm hover:bg-[#ff006e]/20 transition-all">Cancel</button>
          </div>
        </div>
      )}

      {/* Step 2: live preview with a real-time entropy quality meter */}
      {step === 'capture' && (
        <div className="space-y-4">
          <div className="relative rounded-xl overflow-hidden border-2 border-[#00f0ff]/30 bg-black">
            <video
              ref={videoRef}
              playsInline
              autoPlay
              muted
              className="w-full"
              style={{
                maxHeight: '300px',
                objectFit: 'cover',
                border: '2px solid #00f0ff',
                backgroundColor: '#000'
              }}
            />
            {/* Hidden canvas used by the analysis loop to sample frames */}
            <canvas
              ref={canvasRef}
              className="hidden"
              style={{ display: 'none' }}
            />
          </div>
          <div className="p-4 bg-[#16213e] rounded-xl border-2 border-[#00f0ff]/30 space-y-3">
            <div className="text-xs text-[#6ef3f7] space-y-1">
              <p className="font-bold text-[#00f0ff]">Instructions:</p>
              <p>Point camera at bright, textured surface (lamp, carpet, wall with pattern)</p>
            </div>
            <div className="space-y-2">
              <div className="flex items-center justify-between text-xs">
                <span className="text-[#00f0ff]">Entropy Quality:</span>
                <span className="font-mono text-[#00f0ff]">{entropy.toFixed(2)}/8.0</span>
              </div>
              <div className="w-full bg-[#0a0a0f] rounded-full h-2 overflow-hidden">
                <div className="h-full transition-all" style={{ width: `${(entropy / 8) * 100}%`, backgroundColor: getStatusMessage().color }} />
              </div>
              <div className="text-xs font-medium" style={{ color: getStatusMessage().color }}>{getStatusMessage().text}</div>
            </div>
            <div className="flex gap-3">
              <button onClick={captureEntropy} disabled={!captureEnabled} className="flex-1 py-2.5 bg-gradient-to-r from-[#ff006e] to-[#ff4d8f] text-white text-sm rounded-lg font-bold uppercase disabled:opacity-30 disabled:cursor-not-allowed hover:shadow-[0_0_20px_rgba(255,0,110,0.5)] transition-all">
                <Camera className="inline mr-2" size={16} />Capture
              </button>
              <button onClick={onCancel} className="px-4 py-2.5 bg-[#16213e] border-2 border-[#ff006e] text-[#ff006e] rounded-lg font-bold text-sm hover:bg-[#ff006e]/20 transition-all"><X size={16} /></button>
            </div>
          </div>
        </div>
      )}

      {/* Step 3: spinner shown while full analysis and hashing run */}
      {step === 'processing' && (
        <div className="p-6 bg-[#16213e] rounded-xl border-2 border-[#00f0ff]/30 text-center space-y-3">
          <div className="animate-spin mx-auto w-12 h-12 border-4 border-[#00f0ff]/30 border-t-[#00f0ff] rounded-full" />
          <p className="text-sm text-[#00f0ff]">Processing entropy...</p>
        </div>
      )}

      {/* Step 4: stats review — the seed is only sent to the parent on confirm */}
      {step === 'stats' && stats && (
        <div className="p-4 bg-[#0a0a0f] rounded-xl border-2 border-[#39ff14] shadow-[0_0_30px_rgba(57,255,20,0.3)] space-y-4">
          <div className="flex items-center gap-2 text-[#39ff14]"><CheckCircle2 size={24} /><h3 className="text-sm font-bold uppercase">Entropy Analysis</h3></div>
          <div className="space-y-3 text-xs">
            <div><p className="text-[#00f0ff] font-bold mb-1">Primary Source:</p><p className="text-[#6ef3f7]">Camera Sensor Noise</p></div>
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">RANDOMNESS METRICS:</p>
              <div className="grid grid-cols-2 gap-x-4 gap-y-1 font-mono text-[10px]">
                <div>Shannon Entropy:</div><div className="text-[#39ff14]">{stats.shannon.toFixed(2)}/8.00</div>
                <div>Pixel Variance:</div><div className="text-[#39ff14]">{stats.variance.toFixed(1)}</div>
                <div>Unique Colors:</div><div className="text-[#39ff14]">{stats.uniqueColors.toLocaleString()}</div>
                <div>Brightness Range:</div><div className="text-[#39ff14]">{stats.brightnessRange[0]}-{stats.brightnessRange[1]}</div>
              </div>
            </div>
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">RGB DISTRIBUTION:</p>
              <div className="space-y-1 font-mono text-[10px]">
                <div className="flex justify-between"><span>Red:</span><span className="text-[#ff6b6b]">μ={stats.rgbStats.r.mean.toFixed(0)} σ={stats.rgbStats.r.stddev.toFixed(1)}</span></div>
                <div className="flex justify-between"><span>Green:</span><span className="text-[#51cf66]">μ={stats.rgbStats.g.mean.toFixed(0)} σ={stats.rgbStats.g.stddev.toFixed(1)}</span></div>
                <div className="flex justify-between"><span>Blue:</span><span className="text-[#339af0]">μ={stats.rgbStats.b.mean.toFixed(0)} σ={stats.rgbStats.b.stddev.toFixed(1)}</span></div>
              </div>
            </div>
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">BRIGHTNESS HISTOGRAM:</p>
              {/* Bars are normalized against the largest bucket */}
              <div className="flex items-end justify-between h-12 gap-0.5">{stats.histogram.map((val, i) => { const max = Math.max(...stats.histogram); const height = (val / max) * 100; return (<div key={i} className="flex-1 bg-[#00f0ff] rounded-t" style={{ height: `${height}%` }} />); })}</div>
              <div className="flex justify-between text-[9px] text-[#6ef3f7] mt-1"><span>Dark</span><span>Bright</span></div>
            </div>
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">TIMING ENTROPY:</p>
              <div className="grid grid-cols-2 gap-x-4 gap-y-1 font-mono text-[10px]">
                <div>Capture timing:</div><div className="text-[#39ff14]">...{stats.captureTimeMicros}μs</div>
                <div>Interaction samples:</div><div className="text-[#39ff14]">{stats.interactionSamples}</div>
              </div>
            </div>
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">MIXED WITH:</p>
              <div className="space-y-1 text-[#6ef3f7] text-[10px]">
                <div>- crypto.getRandomValues() ✓</div>
                <div>- performance.now() ✓</div>
                <div>- Mouse/keyboard timing ✓</div>
              </div>
            </div>
            <div className="pt-2 border-t border-[#00f0ff]/30">
              <div className="flex justify-between items-center">
                <span className="text-[#00f0ff] font-bold">Total Entropy:</span>
                <span className="text-lg font-bold text-[#39ff14]">{stats.totalBits} bits</span>
              </div>
            </div>

            <div>
              <p className="text-[#00f0ff] font-bold mb-2">HOW SEED IS GENERATED:</p>
              <div className="space-y-1 text-[10px] text-[#6ef3f7]">
                <div>1. Camera captures {stats.uniqueColors.toLocaleString()} unique pixel colors</div>
                <div>2. Pixel data hashed with SHA-256 ({(stats.dataSize / 1024).toFixed(1)}KB raw data)</div>
                <div>3. Mixed with timing entropy ({stats.captureTimeMicros}μs precision)</div>
                <div>4. Combined with {stats.interactionSamples} user interaction samples</div>
                <div>5. Enhanced with crypto.getRandomValues() (32 bytes)</div>
                <div>6. Final hash → {wordCount === 12 ? '128' : '256'} bits → {wordCount} BIP39 words</div>
              </div>
            </div>

            {/* Seed preview stays blurred until hovered to limit shoulder-surfing */}
            <div>
              <p className="text-[#00f0ff] font-bold mb-2">GENERATED SEED:</p>
              <div className="p-3 bg-[#0a0a0f] rounded-lg border border-[#39ff1450]">
                <p className="font-mono text-[10px] text-[#39ff14] blur-sm hover:blur-none transition-all cursor-pointer"
                  title="Hover to reveal">
                  {generatedMnemonic}
                </p>
                <p className="text-[9px] text-[#6ef3f7] mt-1">
                  ⚠️ Hover to reveal - Write this down securely
                </p>
              </div>
            </div>

            <div className="pt-4 border-t border-[#00f0ff30] space-y-3">
              <button
                onClick={() => {
                  // Now send to parent
                  onEntropyGenerated(generatedMnemonic, stats);
                }}
                className="w-full py-3 bg-gradient-to-r from-[#ff006e] to-[#ff4d8f] text-white text-sm rounded-xl font-bold uppercase hover:shadow-[0_0_30px_rgba(255,0,110,0.8)] transition-all"
              >
                Continue with this Seed
              </button>

              <button
                onClick={() => {
                  // Reset and try again
                  setStep('permission');
                  setStats(null);
                  setGeneratedMnemonic('');
                  setEntropy(0);
                  setVariance(0);
                }}
                className="w-full py-2 bg-[#16213e] border-2 border-[#00f0ff] text-[#00f0ff] rounded-lg text-sm font-bold hover:bg-[#00f0ff20] transition-all"
              >
                Retake Photo
              </button>
            </div>
          </div>
        </div>
      )}

      {/* Errors (camera denial, playback failure) shown regardless of step */}
      {error && (
        <div className="p-4 bg-[#1a1a2e] border-2 border-[#ff006e] rounded-lg">
          <p className="text-xs text-[#ff006e]">{error}</p>
        </div>
      )}
    </div>
  );
};
|
||
|
||
export default CameraEntropy; |