sp/src/pages/tmp.astro

---
import Layout from "../layouts/Layout.astro";
---

<Layout title="">
  <div>
    <button id="startBtn">Start Speaking</button>
    <button id="stopBtn">Stop</button>
    <div id="outputText" style="min-height: 100px; border: 1px solid #ccc; padding: 10px;"></div>
  </div>
</Layout>

<script is:inline>
  // is:inline tells Astro to render this script as-is (no bundling), so it runs as plain browser JavaScript.

  /**
   * Speech-to-text helper that listens to microphone input and displays the transcribed text.
   * @param {HTMLElement} outputElement - Element where the transcribed text will be displayed
   * @param {function} [onResult] - Optional callback that receives each final transcript
   * @returns {object|undefined} Control object with start, stop, abort and getFinalTranscript
   *   methods, or undefined if the browser does not support speech recognition
   */
  function speechToText(outputElement, onResult) {
    // Check whether the browser supports the Web Speech API
    if (!('webkitSpeechRecognition' in window) && !('SpeechRecognition' in window)) {
      alert('Your browser does not support speech recognition. Try Chrome or Edge.');
      return;
    }

    // Create the speech recognition object (some browsers only expose the webkit-prefixed constructor)
    const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
    const recognition = new SpeechRecognition();

    // Configure recognition settings
    recognition.continuous = true;     // keep listening across pauses instead of stopping after one phrase
    recognition.interimResults = true; // emit partial results while the user is still speaking
    recognition.lang = 'en-US';        // set language - change as needed
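
    // maxAlternatives is left at its default of 1, so event.results[i][0] below is the engine's top hypothesis.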
    let finalTranscript = '';

    // Event handler for recognition results
    recognition.onresult = (event) => {
      let interimTranscript = '';

      // Clear the accumulated final transcript at the start of a new session
      if (event.resultIndex === 0) {
        finalTranscript = '';
      }

      for (let i = event.resultIndex; i < event.results.length; i++) {
        const transcript = event.results[i][0].transcript;
        if (event.results[i].isFinal) {
          // Replace rather than append to avoid duplicates
          finalTranscript = transcript;
          if (onResult) onResult(finalTranscript);
        } else {
          interimTranscript = transcript;
        }
      }

      // Update the output element: final text in black, interim text in grey
      if (outputElement) {
        outputElement.innerHTML = finalTranscript +
          (interimTranscript ? '<span style="color:#999">' + interimTranscript + '</span>' : '');
      }
    };

    recognition.onerror = (event) => {
      console.error('Speech recognition error', event.error);
    };

    recognition.onend = () => {
      console.log('Speech recognition ended');
    };
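
    // Note: even with continuous = true, some engines (notably Chrome's) may end
    // recognition on their own after a period of silence or a network hiccup; onend
    // fires then as well as after an explicit stop(), so a restart could be triggered
    // here if uninterrupted listening is needed.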

    // Return control methods
    return {
      start: () => recognition.start(),
      stop: () => recognition.stop(),
      abort: () => recognition.abort(),
      getFinalTranscript: () => finalTranscript
    };
  }

  // Get DOM elements
  const outputElement = document.getElementById('outputText');
  const startBtn = document.getElementById('startBtn');
  const stopBtn = document.getElementById('stopBtn');

  // Initialize speech recognition
  const recognizer = speechToText(outputElement, (finalText) => {
    console.log('Final text:', finalText);
  });

  // Add button event listeners (recognizer is undefined when the browser lacks support)
  startBtn.addEventListener('click', () => {
    if (!recognizer) return;
    outputElement.textContent = 'Listening...';
    recognizer.start();
  });

  stopBtn.addEventListener('click', () => {
    if (!recognizer) return;
    recognizer.stop();
    console.log('Final transcript:', recognizer.getFinalTranscript());
  });
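
  // Browsers prompt for microphone access the first time start() is called, and
  // recognition generally works only in a secure context (HTTPS or localhost).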
</script>