<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>3D Passive Doppler Radar</title>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
    <style>
        body {
            margin: 0;
            background: #000;
            color: #00ff00;
            font-family: monospace;
            overflow: hidden;
        }
        #container {
            position: relative;
            width: 100vw;
            height: 100vh;
        }
        #overlay {
            position: absolute;
            top: 10px;
            left: 10px;
            background: rgba(0, 0, 0, 0.7);
            padding: 10px;
            border: 1px solid #00ff00;
        }
        #frequencyDisplay {
            margin: 10px 0;
        }
        button {
            background: #000;
            color: #00ff00;
            border: 1px solid #00ff00;
            padding: 5px 10px;
            cursor: pointer;
            margin: 5px;
        }
        button:hover {
            background: #00ff00;
            color: #000;
        }
        #spectrum {
            width: 300px;
            height: 100px;
            border: 1px solid #00ff00;
        }
    </style>
</head>
<body>
    <div id="container">
        <div id="overlay">
            <button id="startBtn">Start Detection</button>
            <button id="stopBtn">Stop Detection</button>
            <div id="frequencyDisplay">Frequency: 0 Hz</div>
            <div>Objects detected: <span id="objectCount">0</span></div>
            <!-- Explicit width/height keep the drawing buffer in sync with the CSS size -->
            <canvas id="spectrum" width="300" height="100"></canvas>
        </div>
    </div>
    <script>
        let scene, camera, renderer, audioContext, analyser, microphone;
        let points = [];
        let isRunning = false;

        // Initialize Three.js scene
        function initScene() {
            scene = new THREE.Scene();
            camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
            renderer = new THREE.WebGLRenderer();
            renderer.setSize(window.innerWidth, window.innerHeight);
            document.getElementById('container').appendChild(renderer.domElement);

            // Add grid
            const gridHelper = new THREE.GridHelper(20, 20, 0x00ff00, 0x003300);
            scene.add(gridHelper);

            // Position camera
            camera.position.z = 15;
            camera.position.y = 10;
            camera.lookAt(0, 0, 0);

            // Add ambient light
            const ambientLight = new THREE.AmbientLight(0x404040);
            scene.add(ambientLight);

            // Add directional light
            const directionalLight = new THREE.DirectionalLight(0x00ff00, 0.5);
            directionalLight.position.set(1, 1, 1);
            scene.add(directionalLight);
        }
        // Set up the Web Audio graph: microphone -> analyser
        async function initAudio() {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();
            analyser = audioContext.createAnalyser();
            analyser.fftSize = 2048;
            try {
                const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
                microphone = audioContext.createMediaStreamSource(stream);
                microphone.connect(analyser);
            } catch (err) {
                console.error('Microphone access denied:', err);
                // Clean up the context so a later start attempt re-initializes from scratch
                audioContext.close();
                audioContext = null;
                return false;
            }
            return true;
        }
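        // Note: browsers may create the AudioContext in a "suspended" state until a
        // user gesture; startDetection() resumes it before analysis begins. With
        // fftSize = 2048, each frequency bin spans sampleRate / 2048 Hz
        // (roughly 23 Hz at a 48 kHz sample rate).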
        // Add a fading sphere at the given position, brightness scaled by intensity
        function createPoint(position, intensity) {
            const geometry = new THREE.SphereGeometry(0.1, 8, 8);
            const material = new THREE.MeshPhongMaterial({
                color: new THREE.Color(`rgb(0, ${Math.floor(intensity * 255)}, 0)`),
                transparent: true,
                opacity: 0.7
            });
            const point = new THREE.Mesh(geometry, material);
            point.position.copy(position);
            scene.add(point);
            points.push({
                mesh: point,
                lifetime: 100,
                intensity: intensity
            });
        }
        // Age existing points, fading them out and removing expired ones
        function updatePoints() {
            for (let i = points.length - 1; i >= 0; i--) {
                points[i].lifetime--;
                points[i].mesh.material.opacity = points[i].lifetime / 100;
                if (points[i].lifetime <= 0) {
                    scene.remove(points[i].mesh);
                    points.splice(i, 1);
                }
            }
        }
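        // analyzeAudio (below) maps each FFT bin above an arbitrary threshold (100 of 255)
        // to a point on a circle: the bin index sets the angle, and the bin magnitude sets
        // the radius and height, so louder frequencies appear farther out and higher up.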
        function analyzeAudio() {
            if (!isRunning) return;
            const bufferLength = analyser.frequencyBinCount;
            const dataArray = new Uint8Array(bufferLength);
            analyser.getByteFrequencyData(dataArray);

            // Update spectrum visualization
            const spectrumCanvas = document.getElementById('spectrum');
            const spectrumCtx = spectrumCanvas.getContext('2d');
            spectrumCtx.fillStyle = 'black';
            spectrumCtx.fillRect(0, 0, spectrumCanvas.width, spectrumCanvas.height);

            const barWidth = spectrumCanvas.width / bufferLength;
            let x = 0;
            for (let i = 0; i < bufferLength; i++) {
                const barHeight = dataArray[i] * spectrumCanvas.height / 255;
                spectrumCtx.fillStyle = `rgb(0, ${dataArray[i]}, 0)`;
                spectrumCtx.fillRect(x, spectrumCanvas.height - barHeight, barWidth, barHeight);
                x += barWidth;

                // Create 3D points based on frequency intensity
                if (dataArray[i] > 100) {
                    const angle = (Math.PI * 2 * i) / bufferLength;
                    const radius = (dataArray[i] / 255) * 10;
                    const px = Math.cos(angle) * radius;
                    const pz = Math.sin(angle) * radius;
                    const py = (dataArray[i] / 255) * 5;
                    createPoint(
                        new THREE.Vector3(px, py, pz),
                        dataArray[i] / 255
                    );
                }
            }
            // Calculate dominant frequency
            let maxIndex = 0;
            let maxValue = 0;
            for (let i = 0; i < bufferLength; i++) {
                if (dataArray[i] > maxValue) {
                    maxValue = dataArray[i];
                    maxIndex = i;
                }
            }
            // Bin index -> Hz: each of the fftSize/2 bins spans sampleRate / fftSize Hz
            const dominantFrequency = maxIndex * audioContext.sampleRate / analyser.fftSize;
            document.getElementById('frequencyDisplay').textContent =
                `Frequency: ${Math.round(dominantFrequency)} Hz`;
            document.getElementById('objectCount').textContent = points.length;

            requestAnimationFrame(analyzeAudio);
        }
        function animate() {
            requestAnimationFrame(animate);
            if (isRunning) {
                updatePoints();
                // Rotate camera slowly
                camera.position.x = Math.sin(Date.now() * 0.001) * 15;
                camera.position.z = Math.cos(Date.now() * 0.001) * 15;
                camera.lookAt(0, 0, 0);
            }
            renderer.render(scene, camera);
        }
        async function startDetection() {
            if (isRunning) return;
            if (!audioContext && !(await initAudio())) return;
            // Resume in case the browser created the context suspended
            if (audioContext.state === 'suspended') await audioContext.resume();
            isRunning = true;
            analyzeAudio();
        }
        function stopDetection() {
            isRunning = false;
            if (microphone) {
                // Release the microphone stream as well as the audio graph
                microphone.mediaStream.getTracks().forEach(track => track.stop());
                microphone.disconnect();
                microphone = null;
            }
            if (audioContext) {
                audioContext.close();
                audioContext = null;
            }
            // Clear all points
            points.forEach(point => scene.remove(point.mesh));
            points = [];
        }
        // Event listeners
        document.getElementById('startBtn').addEventListener('click', startDetection);
        document.getElementById('stopBtn').addEventListener('click', stopDetection);

        window.addEventListener('resize', () => {
            camera.aspect = window.innerWidth / window.innerHeight;
            camera.updateProjectionMatrix();
            renderer.setSize(window.innerWidth, window.innerHeight);
        });

        // Initialize and start animation
        initScene();
        animate();
    </script>
</body>
</html>