<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Speech to Text in Three.js</title>
<style>
html, body {
  margin: 0;
  padding: 0;
  overflow: hidden;
  width: 100%;
  height: 100%;
  background: #000;
  font-family: sans-serif;
}
#info {
  position: absolute;
  top: 10px;
  left: 10px;
  color: white;
  font-size: 14px;
}
</style>
</head>
<body>
<div id="info">🎤 Say something…</div>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/build/three.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/examples/js/loaders/FontLoader.js"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/examples/js/geometries/TextGeometry.js"></script>
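<!-- The examples/js scripts are non-module builds that register themselves on the
     global THREE namespace, so the code below can use THREE.FontLoader and
     THREE.TextGeometry directly. -->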
<script>
// Set up the Three.js scene, camera, and renderer
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 1000);
camera.position.z = 20;
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
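// A single directional light so the Phong material on the text is shaded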
const light = new THREE.DirectionalLight(0xffffff, 1);
light.position.set(0, 1, 1).normalize();
scene.add(light);
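// The current text mesh and the loaded font, shared by the callbacks below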
let textMesh;
let font;
// Load font
const loader = new THREE.FontLoader();
loader.load("https://cdn.jsdelivr.net/npm/[email protected]/examples/fonts/helvetiker_regular.typeface.json", f => {
  font = f;
  updateText("Say something…");
});
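// Rebuild the 3D text mesh whenever there is a new transcript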
function updateText(msg) {
  if (!font) return;
  if (textMesh) {
    scene.remove(textMesh);
    // Free GPU resources held by the previous text; interim results update often.
    textMesh.geometry.dispose();
    textMesh.material.dispose();
  }
  const geometry = new THREE.TextGeometry(msg, {
    font: font,
    size: 2,
    height: 0.5,
    curveSegments: 12,
  });
  // Center the geometry so the mesh spins around the middle of the text
  // instead of swinging around its left edge.
  geometry.computeBoundingBox();
  const centerOffset = -0.5 * (geometry.boundingBox.max.x - geometry.boundingBox.min.x);
  geometry.translate(centerOffset, 0, 0);
  const material = new THREE.MeshPhongMaterial({ color: 0x44ccff });
  textMesh = new THREE.Mesh(geometry, material);
  scene.add(textMesh);
}
// Render loop
function animate() {
  requestAnimationFrame(animate);
  if (textMesh) {
    textMesh.rotation.y += 0.01;
  }
  renderer.render(scene, camera);
}
animate();
// Web Speech API
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
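// Chromium-based browsers expose SpeechRecognition under the webkit prefix, hence the fallback.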
if (SpeechRecognition) {
  const recognition = new SpeechRecognition();
  recognition.continuous = true;      // keep listening across phrases
  recognition.interimResults = true;  // update while the user is still speaking
  recognition.lang = "en-US";
  recognition.onresult = (event) => {
    // Build the transcript from the results reported in this event.
    let transcript = "";
    for (let i = event.resultIndex; i < event.results.length; i++) {
      transcript += event.results[i][0].transcript;
    }
    document.getElementById("info").innerText = transcript;
    updateText(transcript);
  };
  recognition.onerror = (event) => {
    document.getElementById("info").innerText = "Error: " + event.error;
  };
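  // Even with continuous = true, recognition can stop on its own after a long
  // silence; restarting it from onend is a common way to keep the demo listening.
  recognition.onend = () => recognition.start();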
  recognition.start();
} else {
  document.getElementById("info").innerText = "❌ SpeechRecognition not supported in this browser";
}
// Resize handling
window.addEventListener("resize", () => {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
});
</script>
</body>
</html>