<!DOCTYPE html>
<html>
<head>
  <title>WAV Recorder (16-bit Mono 16kHz)</title>
  <meta charset="UTF-8">
  <style>
    body {
      font-family: Arial, sans-serif;
      max-width: 600px;
      margin: 0 auto;
      padding: 20px;
    }
    button {
      padding: 10px 15px;
      margin: 5px;
      font-size: 16px;
      cursor: pointer;
    }
    #audioVisualizer {
      width: 100%;
      height: 100px;
      background-color: #f0f0f0;
      margin: 20px 0;
    }
    #status {
      margin-top: 20px;
      font-style: italic;
      color: #666;
    }
  </style>
</head>
<body>
  <h1>WAV Recorder (16-bit Mono 16kHz)</h1>

  <div id="audioVisualizer"></div>

  <button id="startBtn">Start Recording</button>
  <button id="stopBtn" disabled>Stop Recording</button>
  <button id="playBtn" disabled>Play Recording</button>
  <button id="downloadBtn" disabled>Download WAV</button>

  <div id="status">Ready to record</div>

  <script>
    // DOM elements
    const startBtn = document.getElementById('startBtn');
    const stopBtn = document.getElementById('stopBtn');
    const playBtn = document.getElementById('playBtn');
    const downloadBtn = document.getElementById('downloadBtn');
    const audioVisualizer = document.getElementById('audioVisualizer');
    const statusDisplay = document.getElementById('status');

    // Audio variables
    let audioContext;
    let mediaStream;
    let processor;
    let recording = false;
    let audioChunks = [];
    let audioBlob;
    let audioUrl;

    // Configuration
    const config = {
      sampleRate: 16000, // 16 kHz sample rate
      numChannels: 1,    // Mono
      bitDepth: 16       // 16-bit
    };
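    // At these settings the raw PCM data rate is 16000 samples/s x 2 bytes x 1 channel
    // = 32,000 bytes/s, i.e. roughly 1.9 MB per minute of recording (plus a 44-byte header).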

    // Start recording
    startBtn.addEventListener('click', async () => {
      try {
        statusDisplay.textContent = "Requesting microphone access...";

        // Get microphone access
        mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });

        // Create audio context with our desired sample rate
        audioContext = new (window.AudioContext || window.webkitAudioContext)({
          sampleRate: config.sampleRate
        });
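        // Note: not every browser honors the requested sampleRate; some fall back to the
        // hardware rate. If audioContext.sampleRate ends up differing from config.sampleRate,
        // the WAV header written below would mislabel the data, so it is worth checking
        // (or resampling) before relying on the 16 kHz label.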

        // Create a script processor node to process the audio
        processor = audioContext.createScriptProcessor(4096, 1, 1);
        processor.onaudioprocess = processAudio;
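        // ScriptProcessorNode is deprecated in favor of AudioWorkletNode, but it is
        // still widely supported and keeps this example self-contained.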

        // Create a media stream source
        const source = audioContext.createMediaStreamSource(mediaStream);

        // Connect the source to the processor and to the destination
        source.connect(processor);
        processor.connect(audioContext.destination);
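        // Connecting the processor to the destination keeps onaudioprocess firing in
        // some browsers; since the output buffer is never written it stays silent,
        // so this does not feed the microphone back to the speakers.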

        // Setup visualization
        setupVisualizer(source);

        // Start recording
        recording = true;
        audioChunks = [];

        // Update UI
        startBtn.disabled = true;
        stopBtn.disabled = false;
        statusDisplay.textContent = "Recording...";

        console.log('Recording started at ' + config.sampleRate + 'Hz');
      } catch (error) {
        console.error('Error:', error);
        statusDisplay.textContent = "Error: " + error.message;
      }
    });

    // Process audio data
    function processAudio(event) {
      if (!recording) return;

      // Get the audio from input channel 0 (the script processor was created with a
      // single input channel, so this is already mono)
      const inputData = event.inputBuffer.getChannelData(0);

      // Convert float32 samples to 16-bit PCM
      const buffer = new ArrayBuffer(inputData.length * 2);
      const view = new DataView(buffer);
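      // Clamp each sample to [-1, 1], then scale negatives by 32768 and positives by
      // 32767 so the full signed 16-bit range is used; the final 'true' writes the
      // value little-endian, as WAV PCM data requires.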

      for (let i = 0, offset = 0; i < inputData.length; i++, offset += 2) {
        const s = Math.max(-1, Math.min(1, inputData[i]));
        view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
      }

      // Store the chunk
      audioChunks.push(new Uint8Array(buffer));
    }

    // Stop recording
    stopBtn.addEventListener('click', () => {
      if (!recording) return;

      // Stop recording
      recording = false;

      // Disconnect processor
      if (processor) {
        processor.disconnect();
        processor = null;
      }

      // Stop media stream tracks
      if (mediaStream) {
        mediaStream.getTracks().forEach(track => track.stop());
      }
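
      // Optionally close the audio context so audio resources are released between
      // recordings (a new context is created on the next start)
      if (audioContext) {
        audioContext.close();
        audioContext = null;
      }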

      // Create WAV file from collected chunks
      createWavFile();

      // Update UI
      startBtn.disabled = false;
      stopBtn.disabled = true;
      playBtn.disabled = false;
      downloadBtn.disabled = false;
      statusDisplay.textContent = "Recording stopped. Ready to play or download.";
    });

    // Create WAV file from collected PCM data
    function createWavFile() {
      // Combine all chunks into a single buffer
      const length = audioChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      const result = new Uint8Array(length);
      let offset = 0;

      audioChunks.forEach(chunk => {
        result.set(chunk, offset);
        offset += chunk.length;
      });

      // Create WAV header
      const wavHeader = createWaveHeader(result.length, config);

      // Combine header and PCM data
      const wavData = new Uint8Array(wavHeader.length + result.length);
      wavData.set(wavHeader, 0);
      wavData.set(result, wavHeader.length);

      // Create blob
      audioBlob = new Blob([wavData], { type: 'audio/wav' });
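
      // Revoke any previous object URL so repeated recordings do not leak memory
      if (audioUrl) {
        URL.revokeObjectURL(audioUrl);
      }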
      audioUrl = URL.createObjectURL(audioBlob);
    }

    // Create WAV header
    function createWaveHeader(dataLength, config) {
      const byteRate = config.sampleRate * config.numChannels * (config.bitDepth / 8);
      const blockAlign = config.numChannels * (config.bitDepth / 8);

      const buffer = new ArrayBuffer(44);
      const view = new DataView(buffer);
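
      // Standard 44-byte PCM WAV header layout (multi-byte fields little-endian):
      //   0-3  "RIFF"         4-7  file size - 8        8-11 "WAVE"
      //  12-15 "fmt "        16-19 fmt chunk size (16) 20-21 audio format (1 = PCM)
      //  22-23 channels      24-27 sample rate         28-31 byte rate
      //  32-33 block align   34-35 bits per sample     36-39 "data"
      //  40-43 data chunk size, followed by the PCM samples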

      // RIFF identifier
      writeString(view, 0, 'RIFF');
      // RIFF chunk length (file size minus the 8-byte RIFF header)
      view.setUint32(4, 36 + dataLength, true);
      // RIFF type
      writeString(view, 8, 'WAVE');
      // Format chunk identifier
      writeString(view, 12, 'fmt ');
      // Format chunk length
      view.setUint32(16, 16, true);
      // Audio format (1 = uncompressed PCM)
      view.setUint16(20, 1, true);
      // Channel count
      view.setUint16(22, config.numChannels, true);
      // Sample rate
      view.setUint32(24, config.sampleRate, true);
      // Byte rate (sample rate * block align)
      view.setUint32(28, byteRate, true);
      // Block align (channel count * bytes per sample)
      view.setUint16(32, blockAlign, true);
      // Bits per sample
      view.setUint16(34, config.bitDepth, true);
      // Data chunk identifier
      writeString(view, 36, 'data');
      // Data chunk length
      view.setUint32(40, dataLength, true);

      return new Uint8Array(buffer);
    }

    // Helper to write strings to DataView
    function writeString(view, offset, string) {
      for (let i = 0; i < string.length; i++) {
        view.setUint8(offset + i, string.charCodeAt(i));
      }
    }

    // Play recorded audio
    playBtn.addEventListener('click', () => {
      if (!audioUrl) return;

      const audio = new Audio(audioUrl);
      audio.play();
      statusDisplay.textContent = "Playing recording...";

      audio.onended = () => {
        statusDisplay.textContent = "Playback finished";
      };
    });

    // Download recorded audio as WAV file
    downloadBtn.addEventListener('click', () => {
      if (!audioBlob) return;

      const a = document.createElement('a');
      a.href = audioUrl;
      a.download = `recording_${config.sampleRate}Hz_${config.bitDepth}bit.wav`;
      document.body.appendChild(a);
      a.click();
      document.body.removeChild(a);

      statusDisplay.textContent = "Download started";
    });

    // Setup audio visualization
    function setupVisualizer(source) {
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 64;
      source.connect(analyser);
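      // frequencyBinCount is fftSize / 2, so an fftSize of 64 yields 32 frequency bars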

      const bufferLength = analyser.frequencyBinCount;
      const dataArray = new Uint8Array(bufferLength);

      const canvas = document.createElement('canvas');
      canvas.width = audioVisualizer.offsetWidth;
      canvas.height = audioVisualizer.offsetHeight;
      audioVisualizer.innerHTML = '';
      audioVisualizer.appendChild(canvas);

      const canvasCtx = canvas.getContext('2d');

      function draw() {
        requestAnimationFrame(draw);

        analyser.getByteFrequencyData(dataArray);

        canvasCtx.fillStyle = 'rgb(200, 200, 200)';
        canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

        const barWidth = (canvas.width / bufferLength) * 2.5;
        let x = 0;

        for (let i = 0; i < bufferLength; i++) {
          const barHeight = dataArray[i] / 2;

          canvasCtx.fillStyle = `rgb(${barHeight + 100}, 50, 50)`;
          canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);

          x += barWidth + 1;
        }
      }

      draw();
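      // Note: this animation loop keeps running after recording stops. If that matters,
      // store the id returned by requestAnimationFrame and cancel it with
      // cancelAnimationFrame when recording stops.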
    }
  </script>
</body>
</html>