Due to project needs, we needed to implement a recording function on the web side. At the outset we found two candidate solutions: one based on an iframe, the other on the HTML5 getUserMedia API. Since our recording feature did not need to be compatible with IE, we chose HTML5's getUserMedia without hesitation. The basic idea was to combine the official API documentation with solutions found online into an approach that suited the project's needs. However, because recording had to work on both the pad and the PC at the same time, we ran into some pitfalls. The following is a walkthrough of the process.
Step 1: The new API is navigator.mediaDevices.getUserMedia, which returns a promise.
The old API is navigator.getUserMedia, so compatibility was made. The code is as follows:
// Polyfill: expose the modern promise-based navigator.mediaDevices.getUserMedia
// on browsers that only ship the legacy callback-style navigator.getUserMedia.
// Older browsers may not implement mediaDevices at all, so start with an empty object.
if (navigator.mediaDevices === undefined) {
  navigator.mediaDevices = {};
}
// Some browsers only partially support mediaDevices. Do not blindly overwrite
// the object (that could clobber existing properties); only add getUserMedia
// when it is missing.
if (navigator.mediaDevices.getUserMedia === undefined) {
  let getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;
  navigator.mediaDevices.getUserMedia = function (constraints) {
    // Some browsers implement none of the variants: reject so callers see a
    // uniform promise-based interface either way.
    if (!getUserMedia) {
      return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
    }
    // Otherwise wrap the legacy callback-style API in a promise.
    return new Promise(function (resolve, reject) {
      getUserMedia.call(navigator, constraints, resolve, reject);
    });
  };
} // FIX: this closing brace was missing in the original snippet
// Step 2
This step uses an HZRecorder wrapper that is widely shared online, and largely follows that approach. Calling HZRecorder.get brings up the recording interface; it takes a callback which, after `new HZRecorder` runs, receives the instantiated HZRecorder object. Starting, pausing, stopping, and playing the recording are then done through that object's methods.
// HZRecorder: wraps a MediaStream in a Web Audio graph and encodes the
// captured PCM into a mono WAV blob. Instance methods: start/stop/end/again/
// getBlob/play/upload. Statics: throwError/canRecording/get.
var HZRecorder = function (stream, config) {
  config = config || {};
  config.sampleBits = config.sampleBits || 8;           // sample size in bits: 8 or 16
  config.sampleRate = config.sampleRate || (44100 / 6); // sample rate (1/6 of 44100)

  // Create the audio context. FIX: the original assigned an undeclared
  // `audioContext`, leaking an accidental global; keep the constructor local.
  var AudioCtx = window.AudioContext || window.webkitAudioContext;
  var context = new AudioCtx();

  // Route the microphone stream into the graph.
  var audioInput = context.createMediaStreamSource(stream);

  // Gain (volume) node.
  var volume = context.createGain();
  audioInput.connect(volume);

  // Script processor node used to tap raw sample buffers.
  // 2nd/3rd args: two input channels and two output channels.
  var bufferSize = 4096;
  var recorder = context.createScriptProcessor(bufferSize, 2, 2);

  var audioData = {
    size: 0,    // total number of cached samples
    buffer: [], // cached Float32Array chunks
    inputSampleRate: context.sampleRate, // input sample rate
    inputSampleBits: 16,                 // input sample size: 8 or 16
    outputSampleRate: config.sampleRate, // output sample rate
    outputSampleBits: config.sampleBits, // output sample size: 8 or 16 (FIX: was misspelled "oututSampleBits")
    input: function (data) {
      this.buffer.push(new Float32Array(data));
      this.size += data.length;
    },
    // Merge the cached chunks, then decimate down to the output sample rate.
    compress: function () {
      var data = new Float32Array(this.size);
      var offset = 0;
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset);
        offset += this.buffer[i].length;
      }
      var compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
      var length = data.length / compression;
      var result = new Float32Array(length);
      var index = 0;
      var j = 0;
      while (index < length) {
        result[index] = data[j];
        j += compression;
        index++;
      }
      return result;
    },
    // Build a mono PCM WAV blob: 44-byte RIFF header followed by samples.
    encodeWAV: function () {
      var sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
      var sampleBits = Math.min(this.inputSampleBits, this.outputSampleBits);
      var bytes = this.compress();
      var dataLength = bytes.length * (sampleBits / 8);
      var buffer = new ArrayBuffer(44 + dataLength);
      var data = new DataView(buffer);
      var channelCount = 1; // mono
      var offset = 0;
      var writeString = function (str) {
        for (var i = 0; i < str.length; i++) {
          data.setUint8(offset + i, str.charCodeAt(i));
        }
      };
      // RIFF chunk descriptor
      writeString('RIFF'); offset += 4;
      // File length minus the first 8 header bytes
      data.setUint32(offset, 36 + dataLength, true); offset += 4;
      // WAV flag (FIX: the original wrote ' WAVE' with a stray leading space)
      writeString('WAVE'); offset += 4;
      // Format chunk marker (the trailing space is required)
      writeString('fmt '); offset += 4;
      // Format chunk length, always 0x10 = 16 for PCM
      data.setUint32(offset, 16, true); offset += 4;
      // Audio format: 1 = PCM
      data.setUint16(offset, 1, true); offset += 2;
      // Channel count
      data.setUint16(offset, channelCount, true); offset += 2;
      // Samples per second
      data.setUint32(offset, sampleRate, true); offset += 4;
      // Byte rate = channels * sampleRate * bitsPerSample / 8
      data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
      // Block align = channels * bitsPerSample / 8
      data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
      // Bits per sample
      data.setUint16(offset, sampleBits, true); offset += 2;
      // Data chunk marker
      writeString('data'); offset += 4;
      // Data chunk length = total size - 44
      data.setUint32(offset, dataLength, true); offset += 4;
      // Write the sample data.
      if (sampleBits === 8) {
        for (var i = 0; i < bytes.length; i++, offset++) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          var val = s < 0 ? s * 0x8000 : s * 0x7FFF;
          val = parseInt(255 / (65535 / (val + 32768)), 10);
          data.setInt8(offset, val); // FIX: setInt8 takes no endianness argument
        }
      } else {
        for (var i = 0; i < bytes.length; i++, offset += 2) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
      }
      return new Blob([data], { type: 'audio/wav' });
    }
  };

  // Start recording: tap buffers and keep the graph alive via destination.
  this.start = function () {
    audioInput.connect(recorder);
    recorder.connect(context.destination);
  };
  // Stop capturing (leaves the context open so recording can resume).
  this.stop = function () {
    recorder.disconnect();
  };
  // End the session and release the audio context.
  this.end = function () {
    context.close();
  };
  // Resume capturing after stop().
  this.again = function () {
    recorder.connect(context.destination);
  };
  // Stop and return the encoded WAV blob.
  this.getBlob = function () {
    this.stop();
    return audioData.encodeWAV();
  };
  // Play back the recording through the given <audio> element.
  this.play = function (audio) {
    audio.src = window.URL.createObjectURL(this.getBlob());
  };
  // Upload the recording via multipart POST; callback reports progress state.
  this.upload = function (url, callback) {
    var fd = new FormData();
    fd.append('audioData', this.getBlob());
    var xhr = new XMLHttpRequest();
    if (callback) {
      xhr.upload.addEventListener('progress', function (e) { callback('uploading', e); }, false);
      xhr.addEventListener('load', function (e) { callback('ok', e); }, false);
      xhr.addEventListener('error', function (e) { callback('error', e); }, false);
      xhr.addEventListener('abort', function (e) { callback('cancel', e); }, false);
    }
    xhr.open('POST', url);
    xhr.send(fd);
  };

  // Audio capture: cache the first channel of every processed buffer.
  recorder.onaudioprocess = function (e) {
    audioData.input(e.inputBuffer.getChannelData(0));
  };
};

// Throw an object whose toString() yields the message (original behavior kept;
// note this is not an Error subclass, so stack traces are lost).
HZRecorder.throwError = function (message) {
  throw new function () {
    this.toString = function () {
      return message;
    };
  };
};

// Recording support check. FIX: also accept the modern
// navigator.mediaDevices.getUserMedia, not only the legacy API.
HZRecorder.canRecording = !!((navigator.mediaDevices && navigator.mediaDevices.getUserMedia) || navigator.getUserMedia);

// Request microphone access and hand an HZRecorder instance to the callback.
HZRecorder.get = function (callback, config) {
  if (callback) {
    navigator.mediaDevices
      .getUserMedia({ audio: true })
      .then(function (stream) {
        let rec = new HZRecorder(stream, config);
        callback(rec);
      })
      .catch(function (error) {
        HZRecorder.throwError('Unable to record, please check the device status');
      });
  }
};

window.HZRecorder = HZRecorder;
The above already meets most needs. But we also have to support the pad side, where two issues must be resolved.
Below are solutions to both problems.
Step 3: The following is my solution for producing recordings in MP3 format, and for the error the pad throws when window.URL.createObjectURL is given blob data.
1. Modify the audioData object code in HZRecorder, and include lamejs.js, a library generously shared by a developer online.
// MP3-capable replacement for audioData: PCM buffers are encoded on the fly
// with lamejs and assembled into an audio/mp3 blob by finish().
// (Requires lamejs.js to be loaded; `context` and `config` come from the
// enclosing HZRecorder scope.)
const lame = new lamejs();
let audioData = {
  samplesMono: null,
  maxSamples: 1152, // samples per MPEG audio frame
  mp3Encoder: new lame.Mp3Encoder(1, context.sampleRate || 44100, config.bitRate || 128),
  dataBuffer: [], // encoded MP3 chunks
  size: 0,    // recorded length (samples)
  buffer: [], // recording cache
  inputSampleRate: context.sampleRate, // input sample rate
  inputSampleBits: 16,                 // input sample size: 8 or 16
  outputSampleRate: config.sampleRate, // output sample rate
  outputSampleBits: config.sampleBits, // output sample size: 8 or 16
  // Copy a Float32Array of samples into 16-bit signed PCM.
  convertBuffer: function (arrayBuffer) {
    let data = new Float32Array(arrayBuffer);
    let out = new Int16Array(arrayBuffer.length);
    this.floatTo16BitPCM(data, out);
    return out;
  },
  floatTo16BitPCM: function (input, output) {
    for (let i = 0; i < input.length; i++) {
      let s = Math.max(-1, Math.min(1, input[i]));
      output[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
  },
  appendToBuffer: function (mp3Buf) {
    this.dataBuffer.push(new Int8Array(mp3Buf));
  },
  // Encode one captured buffer, one MPEG frame at a time.
  encode: function (arrayBuffer) {
    this.samplesMono = this.convertBuffer(arrayBuffer);
    let remaining = this.samplesMono.length;
    // FIX: the loop condition was `remaining >= 0`, which encoded one extra
    // empty frame after the data was exhausted.
    for (let i = 0; remaining > 0; i += this.maxSamples) {
      let left = this.samplesMono.subarray(i, i + this.maxSamples);
      let mp3buf = this.mp3Encoder.encodeBuffer(left);
      this.appendToBuffer(mp3buf);
      remaining -= this.maxSamples;
    }
  },
  // Flush the encoder and assemble the final MP3 blob.
  finish: function () {
    this.appendToBuffer(this.mp3Encoder.flush());
    return new Blob(this.dataBuffer, { type: 'audio/mp3' });
  },
  input: function (data) {
    this.buffer.push(new Float32Array(data));
    this.size += data.length;
  },
  // Merge cached chunks and decimate to the output sample rate (WAV path).
  compress: function () {
    let data = new Float32Array(this.size);
    let offset = 0;
    for (let i = 0; i < this.buffer.length; i++) {
      data.set(this.buffer[i], offset);
      offset += this.buffer[i].length;
    }
    let compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
    let length = data.length / compression;
    let result = new Float32Array(length);
    let index = 0;
    let j = 0;
    while (index < length) {
      result[index] = data[j];
      j += compression;
      index++;
    }
    return result;
  },
  // Build a mono PCM WAV blob (legacy path, kept for compatibility).
  encodeWAV: function () {
    let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
    // FIX: read this.outputSampleBits (the property defined above); the
    // original read the misspelled `oututSampleBits`, yielding NaN here.
    let sampleBits = Math.min(this.inputSampleBits, this.outputSampleBits);
    let bytes = this.compress();
    let dataLength = bytes.length * (sampleBits / 8);
    let buffer = new ArrayBuffer(44 + dataLength);
    let data = new DataView(buffer);
    let channelCount = 1; // mono
    let offset = 0;
    let writeString = function (str) {
      for (let i = 0; i < str.length; i++) {
        data.setUint8(offset + i, str.charCodeAt(i));
      }
    };
    // RIFF chunk descriptor
    writeString('RIFF'); offset += 4;
    // File length minus the first 8 header bytes
    data.setUint32(offset, 36 + dataLength, true); offset += 4;
    writeString('WAVE'); offset += 4;
    // Format chunk marker (the trailing space is required)
    writeString('fmt '); offset += 4;
    // Format chunk length, always 16 for PCM
    data.setUint32(offset, 16, true); offset += 4;
    // Audio format: 1 = PCM
    data.setUint16(offset, 1, true); offset += 2;
    // Channel count
    data.setUint16(offset, channelCount, true); offset += 2;
    // Samples per second
    data.setUint32(offset, sampleRate, true); offset += 4;
    // Byte rate = channels * sampleRate * bitsPerSample / 8
    data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
    // FIX: the block-align field (channels * bitsPerSample / 8) was missing
    // in this version, shifting every later field and corrupting the
    // 44-byte header (compare the original WAV encoder in step 2).
    data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
    // Bits per sample
    data.setUint16(offset, sampleBits, true); offset += 2;
    // Data chunk marker
    writeString('data'); offset += 4;
    // Data chunk length = total size - 44
    data.setUint32(offset, dataLength, true); offset += 4;
    // Write the sample data.
    if (sampleBits === 8) {
      for (let i = 0; i < bytes.length; i++, offset++) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        let val = s < 0 ? s * 0x8000 : s * 0x7fff;
        val = parseInt(255 / (65535 / (val + 32768)), 10);
        data.setInt8(offset, val); // FIX: setInt8 takes no endianness flag
      }
    } else {
      for (let i = 0; i < bytes.length; i++, offset += 2) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
      }
    }
    return new Blob([data], { type: 'audio/wav' });
  }
};
2. Modify HZRecorder's audio-collection callback so it feeds data to the MP3 encoder.
// Audio capture: hand each processed buffer's first channel to the MP3 encoder.
recorder.onaudioprocess = (e) => {
  audioData.encode(e.inputBuffer.getChannelData(0));
};
3. Replace HZRecorder's getBlob method.
// Stop capturing and return the finished MP3 blob.
this.getBlob = function () {
  this.stop();
  const mp3Blob = audioData.finish();
  return mp3Blob;
};
4. Replace HZRecorder's play method, converting the blob to a base64 data URL.
// Play back: hand the recording to `func` as a base64 data URL.
this.play = function (func) {
  readBlobAsDataURL(this.getBlob(), func);
};

// Read a Blob and invoke `callback` with the resulting data: URL string.
function readBlobAsDataURL(data, callback) {
  const reader = new FileReader();
  reader.onload = (e) => {
    callback(e.target.result);
  };
  reader.readAsDataURL(data);
}
So far, the above two problems have been solved.
Step 4: Here we mainly introduce how to create animated effects during recording. One of our animation requirements is:
According to the incoming volume, a circular arc is dynamically expanded.
// Create an analyser node to read frequency data from the input, and drive a
// canvas animation: concentric arcs light up as the volume rises.
// FIX: the original declared the node as `analyzer` but used `analyser`
// everywhere else, throwing a ReferenceError; one spelling is used here.
const analyser = context.createAnalyser();
audioInput.connect(analyser);
const inputAnalyser = new Uint8Array(1); // only the first frequency bin is sampled
const wrapEle = $this.refs['wrap'];
let ctx = wrapEle.getContext('2d');
const width = wrapEle.width;
const height = wrapEle.height;
const center = { x: width / 2, y: height / 2 };

// Stroke a one-pixel arc; begin/end angles are given in degrees.
function drawArc(ctx, color, x, y, radius, beginAngle, endAngle) {
  ctx.beginPath();
  ctx.lineWidth = 1;
  ctx.strokeStyle = color;
  ctx.arc(x, y, radius, (Math.PI * beginAngle) / 180, (Math.PI * endAngle) / 180);
  ctx.stroke();
}

(function drawSpectrum() {
  analyser.getByteFrequencyData(inputAnalyser); // fetch frequency-domain data
  ctx.clearRect(0, 0, width, height);
  for (let i = 0; i < 1; i++) {
    let value = inputAnalyser[i] / 3; // scaled volume level
    // Pick how many arc pairs are "lit" (orange) versus idle (grey).
    // FIX: the original color literals contained stray spaces
    // ('# e4e4e4', '#f5A631 '), which are invalid CSS colors.
    let colors = [];
    if (value <= 16) {
      colors = ['#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4', '#e4e4e4', '#e4e4e4'];
    } else if (value <= 32) {
      colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4'];
    } else {
      colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631'];
    }
    // Three rings of arc pairs (top and bottom), expanding outward.
    drawArc(ctx, colors[0], center.x, center.y, 52 + 16, -30, 30);
    drawArc(ctx, colors[1], center.x, center.y, 52 + 16, 150, 210);
    drawArc(ctx, colors[2], center.x, center.y, 52 + 32, -22.5, 22.5);
    drawArc(ctx, colors[3], center.x, center.y, 52 + 32, 157.5, 202.5);
    drawArc(ctx, colors[4], center.x, center.y, 52 + 48, -13, 13);
    drawArc(ctx, colors[5], center.x, center.y, 52 + 48, 167, 193);
  }
  // Schedule the next animation frame.
  requestAnimationFrame(drawSpectrum);
})();
At this point, a complete HTML5 recording solution is done. If anything needs to be added, or anything seems unreasonable, please leave a comment.
PS: for lamejs, see its GitHub repository.
That is the entire content of this article. I hope it is helpful for everyone's study, and I also hope everyone will support VeVb Wulin Network.