Recording Audio from the Microphone in JavaScript: Oscilloscope Display, Download, POST, and Playback
When you press the “Enable Microphone” button, the browser prompts for microphone access and the incoming audio is visualized in real time, like an oscilloscope.
Press the “Start Recording” button to begin recording; pressing “Stop Recording” finishes the take and plays it back through the <audio> element. The source below also contains commented-out variants for downloading the recording or POSTing it to a server.
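Under the hood the page combines three browser APIs: getUserMedia() for microphone access, an AnalyserNode for the oscilloscope, and MediaRecorder for capturing the audio. Stripped of the button and canvas wiring in the full source, the core flow looks roughly like this minimal sketch (no error handling; run inside an async function):

// Minimal sketch of the core flow (no UI wiring or error handling)
const stream = await navigator.mediaDevices.getUserMedia({audio: true});

// Oscilloscope: route the stream through an AnalyserNode and poll time-domain samples
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;
audioCtx.createMediaStreamSource(stream).connect(analyser);
const samples = new Uint8Array(analyser.fftSize);
// ...call analyser.getByteTimeDomainData(samples) once per animation frame and plot it...

// Recording: collect chunks from a MediaRecorder and assemble them into a Blob on stop
const recorder = new MediaRecorder(stream);
const chunks = [];
recorder.addEventListener("dataavailable", e => chunks.push(e.data));
recorder.addEventListener("stop", () => {
  const blob = new Blob(chunks, {type: chunks[0].type});
  // ...download the blob, POST it, or play it back via an <audio> element...
});
recorder.start();
// later: recorder.stop();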
Source Code
<div style="display:flex;flex-wrap:wrap;">
<div>
<button id="MicOn" style="font-size:20px;">Enable Microphone</button>
</div>
<div>
<button id="RecStart" style="font-size:20px;">Start Recording</button>
<button id="RecStop" style="font-size:20px;">Stop Recording</button>
</div>
<div style="width:100%;max-width:400px;min-width:280px;">
<audio id="AudioPlay" controls playsinline style="width:100%;"></audio>
</div>
<div style="width:100%;max-width:800px;min-width:280px;">
<div style="width:80%;padding-top:20%;padding-bottom:0;padding-left:0;padding-right:0;margin:0;position:relative;box-sizing:border-box;">
<div style="top:0;right:0;bottom:0;left:0;position:absolute;margin:0;display:block;box-sizing:border-box;">
<canvas id="AudioWave" style="margin:0;padding:0;width:100%;height:100%;display:block;box-sizing:border-box;"></canvas>
</div>
</div>
</div>
</div>
<script>
var TMamMicRec = function(micOnBtn, recStartBtn, recStopBtn, audioPlay, audioWave){
this.micOnBtn = micOnBtn; // Button to enable microphone access
this.recStartBtn = recStartBtn; // Button to start recording
this.recStopBtn = recStopBtn; // Button to stop recording
this.audioPlay = audioPlay; // <audio> element
this.audioWave = audioWave; // Canvas for waveform display
this.audioWaveCtx = this.audioWave.getContext('2d', {willReadFrequently:true});
this.stream = null;
this.mediaRecorder = null;
this.chunks = [];
this.recStartBtn.setAttribute("disabled", true);
this.recStopBtn.setAttribute("disabled", true);
this.type = null;
this.audioCtx = null;
this.audioSource = null;
this.audioAnalyser = null;
this.audioBufLen = 0;
this.audioBuf = null;
this.drawWave = function(){ // Draw waveform
window.requestAnimationFrame(this.drawWave.bind(this)); // Schedule the next frame
this.audioAnalyser.getByteTimeDomainData(this.audioBuf);
// Values range from 0–255, with 128 representing silence
this.audioWaveCtx.fillStyle = 'rgb(200,200,200)';
this.audioWaveCtx.fillRect(0, 0, this.audioBufLen, this.audioBufLen/4);
this.audioWaveCtx.lineWidth = this.audioBufLen / 256;
this.audioWaveCtx.strokeStyle = 'rgb(0,0,0)';
this.audioWaveCtx.beginPath();
let y = this.audioBufLen/4/256; // Vertical scale: map sample values (0–255) onto the canvas height
for (let i = 0; i < this.audioBufLen; i++){
if (i === 0){
this.audioWaveCtx.moveTo(i, this.audioBuf[i] * y);
} else {
this.audioWaveCtx.lineTo(i, this.audioBuf[i] * y);
}
}
this.audioWaveCtx.stroke();
}
this.micOnBtn.addEventListener("click", function(){
if (navigator.mediaDevices == undefined){
alert('Unsupported browser or not using HTTPS');
return;
}
navigator.mediaDevices.getUserMedia({audio:true})
.then(function(stream){
this.stream = stream;
if (this.audioCtx == null){
let AudioContext = window.AudioContext || window.webkitAudioContext;
this.audioCtx = new AudioContext();
this.audioSource = this.audioCtx.createMediaStreamSource(this.stream);
this.audioAnalyser = this.audioCtx.createAnalyser();
this.audioAnalyser.fftSize = 2048;
this.audioSource.connect(this.audioAnalyser);
this.audioBufLen = this.audioAnalyser.fftSize;
this.audioWave.setAttribute("width", this.audioBufLen + 'px');
this.audioWave.setAttribute("height", this.audioBufLen/4 + 'px');
this.audioBuf = new Uint8Array(this.audioBufLen);
this.audioAnalyser.getByteTimeDomainData(this.audioBuf);
this.drawWave();
}
this.mediaRecorder = new MediaRecorder(this.stream);
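// A preferred container/codec could also be requested here (a sketch, assuming Opus support):
// if (MediaRecorder.isTypeSupported && MediaRecorder.isTypeSupported("audio/webm;codecs=opus")){
//     this.mediaRecorder = new MediaRecorder(this.stream, {mimeType: "audio/webm;codecs=opus"});
// }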
this.mediaRecorder.addEventListener("dataavailable", function(event){
this.chunks.push(event.data);
}.bind(this));
this.mediaRecorder.addEventListener("stop", function(e){
// audio/webm;codecs=opus, audio/ogg;codecs=opus, etc.
this.type = this.chunks[0].type;
let blob = new Blob(this.chunks, {"type": this.type});
this.chunks = [];
/*
// When downloading the recorded file
let aTag = document.createElement("a");
aTag.href = URL.createObjectURL(blob);
aTag.download = "a.mp4";
aTag.click();
*/
/*
// Convert to Data URI, place it in a <input type="hidden"> value, and send it to the server via POST
let fileReaderPost = new FileReader();
fileReaderPost.addEventListener("load", function(event){
let formTag = document.createElement('form');
formTag.method = "post";
formTag.action = "post.php"; // URL for POST
let inputTag = document.createElement('input');
inputTag.type = "hidden";
inputTag.name = "record"; // POST field name
inputTag.value = event.target.result; // POST value
formTag.appendChild(inputTag);
document.body.appendChild(formTag);
formTag.submit(); // Execute POST
}.bind(this));
fileReaderPost.readAsDataURL(blob);
*/
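/*
// Alternative sketch (an assumption, not part of the original flow): upload the Blob
// directly with fetch() and FormData instead of embedding a Data URI in a form POST.
// "upload.php" and the field name "record" are placeholders; the file extension should match this.type.
let formData = new FormData();
formData.append("record", blob, "recording.webm");
fetch("upload.php", {method: "POST", body: formData})
.then(function(response){ console.log("Upload status: " + response.status); })
.catch(function(e){ console.log(e); });
*/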
// Convert the recorded blob to a Data URI and play it directly using an <audio> tag
let fileReaderAudio = new FileReader();
fileReaderAudio.addEventListener("load", function(event){
this.audioPlay.pause();
this.audioPlay.currentTime = 0;
this.audioPlay.setAttribute("src", event.target.result);
this.audioPlay.load();
this.audioPlay.play();
}.bind(this));
fileReaderAudio.readAsDataURL(blob);
this.recStartBtn.removeAttribute("disabled");
this.recStopBtn.setAttribute("disabled", true);
}.bind(this));
this.recStartBtn.removeAttribute("disabled");
this.micOnBtn.setAttribute("disabled", true);
}.bind(this)).catch(function(e){
console.log(e);
document.getElementById("alert").innerHTML = e;
}.bind(this));
}.bind(this));
this.recStartBtn.addEventListener("click", function(){
this.recStartBtn.setAttribute("disabled", true);
this.recStopBtn.removeAttribute("disabled");
this.mediaRecorder.start();
}.bind(this));
this.recStopBtn.addEventListener("click", function(){
this.mediaRecorder.stop();
}.bind(this));
}
window.addEventListener("DOMContentLoaded", function(){
let mamMicRec = new TMamMicRec(
document.getElementById("MicOn"),
document.getElementById("RecStart"),
document.getElementById("RecStop"),
document.getElementById("AudioPlay"),
document.getElementById("AudioWave"),
);
});
</script>
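The form-POST variant above sends the recording to the server as a Base64 Data URI in a field named record; post.php is only a placeholder URL. Whatever stack receives it has to strip the data:<mime>;base64, prefix and decode the remainder. A rough sketch of that step (using Node.js/Express here purely as an assumed server stack; the endpoint path, size limit, and file name are illustrative):

// Hypothetical receiving endpoint sketched with Node.js/Express (post.php above is just a placeholder URL)
const express = require("express");
const fs = require("fs");
const app = express();
// The Data URI of even a short recording exceeds the default body-size limit, so raise it
app.use(express.urlencoded({extended: false, limit: "25mb"}));
app.post("/post.php", (req, res) => {
  // req.body.record looks like "data:audio/webm;codecs=opus;base64,GkXf..."
  const match = /^data:(audio\/[^;,]+)[^,]*,(.*)$/.exec(req.body.record || "");
  if (!match) return res.status(400).send("no recording");
  const ext = match[1].includes("ogg") ? "ogg" : match[1].includes("mp4") ? "mp4" : "webm";
  fs.writeFileSync("recording." + ext, Buffer.from(match[2], "base64"));
  res.send("saved");
});
app.listen(3000);

With PHP or any other backend the steps are the same: read the record field, discard everything up to and including the first comma, and Base64-decode the rest before writing it to a file.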
