<!DOCTYPE html>
<html>
<head>
  <title>Vosk Audio Streaming</title>
  <script src="https://cdn.socket.io/4.5.4/socket.io.min.js"></script>
  <style>
    body { font-family: Arial, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }
    #transcript { border: 1px solid #ccc; padding: 15px; min-height: 100px; margin-top: 20px; }
    button { padding: 10px 15px; background: #007bff; color: white; border: none; cursor: pointer; }
    button:disabled { background: #cccccc; }
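    /* Optional: dim in-progress (partial) results so they stand apart from
       finalized lines; the "partial" class is assigned by the script below. */
    #transcript .partial { color: #888; font-style: italic; }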
  </style>
</head>
<body>
  <h1>Real-time Speech Recognition</h1>
  <button id="startBtn">Start Listening</button>
  <button id="stopBtn" disabled>Stop</button>
  <div id="transcript"></div>

  <script>
    const socket = io('http://localhost:5000');
    const startBtn = document.getElementById('startBtn');
    const stopBtn = document.getElementById('stopBtn');
    const transcriptDiv = document.getElementById('transcript');

    let mediaStream;
    let audioContext;
    let processor;
    let microphone;

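    // Assumed server contract (not defined in this file): a Socket.IO backend
    // on localhost:5000 that receives binary 'audio_stream' chunks, feeds them
    // to a Vosk recognizer, and emits 'partial_transcription' events for
    // in-progress hypotheses and 'transcription' events for finalized results.
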
    // Handle server responses. A final result replaces the pending partial
    // line (if any) so the same text is not shown twice; otherwise it is
    // appended as a new line.
    socket.on('transcription', (text) => {
      const lastP = transcriptDiv.lastElementChild;
      if (lastP && lastP.classList.contains('partial')) {
        lastP.classList.remove('partial');
        lastP.textContent = text;
      } else {
        const p = document.createElement('p');
        p.textContent = text;
        transcriptDiv.appendChild(p);
      }
    });

    // Update (or create) the trailing partial line with the latest hypothesis.
    socket.on('partial_transcription', (text) => {
      const lastP = transcriptDiv.lastElementChild;
      if (lastP && lastP.classList.contains('partial')) {
        lastP.textContent = text;
      } else {
        const p = document.createElement('p');
        p.className = 'partial';
        p.textContent = text;
        transcriptDiv.appendChild(p);
      }
    });

    // Start recording
    startBtn.addEventListener('click', async () => {
      try {
        startBtn.disabled = true;
        stopBtn.disabled = false;
        transcriptDiv.innerHTML = '';

        mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        audioContext = new (window.AudioContext || window.webkitAudioContext)();
        microphone = audioContext.createMediaStreamSource(mediaStream);

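        // Note: the AudioContext runs at the hardware sample rate (commonly
        // 44.1 or 48 kHz). The server-side Vosk recognizer is assumed to be
        // created with the same rate, or to resample the incoming audio;
        // otherwise recognition quality will degrade.
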
        // ScriptProcessorNode is deprecated (AudioWorklet is the modern
        // replacement) but remains widely supported and keeps this example
        // simple: buffer size 4096, mono in, mono out.
        processor = audioContext.createScriptProcessor(4096, 1, 1);
        processor.onaudioprocess = (e) => {
          const audioData = e.inputBuffer.getChannelData(0);
          const raw = convertFloat32ToInt16(audioData);
          socket.emit('audio_stream', raw);
        };

        microphone.connect(processor);
        processor.connect(audioContext.destination);

      } catch (error) {
        console.error('Error:', error);
        alert('Error accessing microphone: ' + error.message);
        resetControls();
      }
    });

    // Stop recording: stop the microphone tracks, tear down the audio graph,
    // and close the AudioContext so it releases its resources.
    stopBtn.addEventListener('click', () => {
      if (mediaStream) {
        mediaStream.getTracks().forEach(track => track.stop());
      }
      if (microphone && processor) {
        microphone.disconnect();
        processor.disconnect();
      }
      if (audioContext) {
        audioContext.close();
      }
      resetControls();
    });

    // Helper functions
    function resetControls() {
      startBtn.disabled = false;
      stopBtn.disabled = true;
    }

    // Convert Web Audio float samples (range -1..1) to 16-bit signed PCM,
    // the mono 16-bit format the Vosk recognizer expects.
    function convertFloat32ToInt16(buffer) {
      const l = buffer.length;
      const buf = new Int16Array(l);
      for (let i = 0; i < l; i++) {
        // Clamp to [-1, 1] before scaling so out-of-range samples cannot
        // overflow the Int16 range.
        const s = Math.max(-1, Math.min(1, buffer[i]));
        buf[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
      }
      return buf.buffer;
    }
  </script>
</body>
</html>