Native JavaScript recorder with microphone selection and audio visualization, saving the recording as a downloadable WAV file
Code language: HTML
Category: Multimedia
Code description: Native JavaScript recorder with microphone selection and a live audio visualization that saves the recording as a downloadable WAV file.
Below is a partial code preview; for the full code, click Download or open it in the bfwstudio webide.
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <style>
        #msg {
            visibility: hidden;
            color: red;
            font-weight: bold;
            font-size: 22px;
            font-family: Verdana;
        }

        button {
            padding: 5px 10px;
            border: 1px solid grey;
            font-size: 18px;
            background: white;
        }

        .audio-controls {
            display: flex;
            align-items: center;
            padding-top: 20px;
            justify-content: center;
        }

        .audio-controls button {
            margin: 0px 5px;
        }

        canvas {
            margin-top: 10px;
            background-color: black;
        }

        select {
            height: 25px;
            margin: 0px 5px;
        }

        a {
            margin-left: 20px;
        }

        .app {
            text-align: center;
            padding-top: 20px;
        }
    </style>
</head>
<body>
<div class="app">
    <select name="" id="micSelect"></select>
    <select id="visSelect">
        <option value="frequencybars">Bar</option>
        <option value="sinewave">Wave</option>
        <option value="circle">Circle</option>
    </select>
    <a id="download">Download</a>
    <div class="audio-controls">
        <button id="record">Record</button>
        <button id="stop">Stop</button>
        <audio id="audio" controls></audio>
    </div>
    <div id="msg">Recording...</div>
    <canvas width="500" height="300"></canvas>
</div>
<script>
    (async () => {
        let leftchannel = [];
        let rightchannel = [];
        let recorder = null;
        let recording = false;
        let recordingLength = 0;
        let volume = null;
        let audioInput = null;
        let sampleRate = null;
        let AudioContext = window.AudioContext || window.webkitAudioContext;
        let context = null;
        let analyser = null;
        let canvas = document.querySelector('canvas');
        let canvasCtx = canvas.getContext("2d");
        let visualSelect = document.querySelector('#visSelect');
        let micSelect = document.querySelector('#micSelect');
        let stream = null;
        let tested = false;

        try {
            window.stream = stream = await getStream();
            console.log('Got stream');
        } catch (err) {
            alert('Issue getting mic: ' + err);
        }

        // list the available audio inputs and fill the microphone <select>
        const deviceInfos = await navigator.mediaDevices.enumerateDevices();
        var mics = [];
        for (let i = 0; i !== deviceInfos.length; ++i) {
            let deviceInfo = deviceInfos[i];
            if (deviceInfo.kind === 'audioinput') {
                mics.push(deviceInfo);
                let label = deviceInfo.label || 'Microphone ' + mics.length;
                console.log('Mic ', label + ' ' + deviceInfo.deviceId);
                const option = document.createElement('option');
                option.value = deviceInfo.deviceId;
                option.text = label;
                micSelect.appendChild(option);
            }
        }

        function getStream(constraints) {
            if (!constraints) {
                constraints = { audio: true, video: false };
            }
            return navigator.mediaDevices.getUserMedia(constraints);
        }

        setUpRecording();

        function setUpRecording() {
            context = new AudioContext();
            sampleRate = context.sampleRate;

            // creates a gain node
            volume = context.createGain();

            // creates an audio node from the microphone incoming stream
            audioInput = context.createMediaStreamSource(stream);

            // create analyser
            analyser = context.createAnalyser();

            // connect audio input to the analyser
            audioInput.connect(analyser);

            // connect analyser to the volume control
            // analyser.connect(volume);

            let bufferSize = 2048;
            recorder = context.createScriptProcessor(bufferSize, 2, 2);

            // we connect the volume control to the processor
            // volume.connect(recorder);

            analyser.connect(recorder);

            // finally connect the processor to the output
            recorder.connect(context.destination);

            recorder.onaudioprocess = function (e) {
                if (!recording) return;
                // Do something with the data, i.e. convert this to WAV
                console.log('recording');
                let left = e.inputBuffer.getChannelData(0);
                let right = e.inputBuffer.getChannelData(1);
                if (!tested) {
                    tested = true;
                    // if this reduces to 0 we are not getting any sound
                    if (!left.reduce((a, b) => a + b)) {
                        alert("There seems to be an issue with your Mic");
                        // clean up
                        stop();
                        stream.getTracks().forEach(function (track) {
                            track.stop();
                        });
                        context.close();
                    }
                }
                // we clone the samples
                leftchannel.push(new Float32Array(left));
                rightchannel.push(new Float32Array(right));
                recordingLength += bufferSize;
            };
            visualize();
        }

        function mergeBuffers(channelBuffer, recordingLength) {
            let result = new Float32Array(recordingLength);
            let offset = 0;
            let lng = channelBuffer.length;
            for (let i = 0; i < lng; i++) {
                let buffer = channelBuffer[i];
                result.set(buffer, offset);
                offset += buffer.length;
            }
            return result;
        }

        function interleave(leftChannel, rightChannel) {
            let length = leftChannel.length + rightChannel.length;
            let result = new Float32Array(length);
            let inputIndex = 0;
            for (let index = 0; index < length;) {
                result[index++] = leftChannel[inputIndex];
                result[index++] = rightChannel[inputIndex];
                inputIndex++;
            }
            return result;
        }

        function writeUTFBytes(view, offset, string) {
            let lng = string.length;
            for (let i = 0; i < lng; i++) {
                view.setUint8(offset + i, string.charCodeAt(i));
            }
        }

        function start() {
            recording = true;
            document.querySelector('#msg').style.visibility = 'visible';
            // reset the buffers for the new recording
            leftchannel.length = rightchannel.length = 0;
            recordingLength = 0;
            console.log('context: ', !!context);
            if (!context) setUpRecording();
        }

        function stop() {
            console.log('Stop');
            recording = false;
            document.querySelector('#msg').style.visibility = 'hidden';
            // we flatten the left and right channel buffers down
            let leftBuffer = mergeBuffers(leftchannel, recordingLength);
            let rightBuffer = mergeBuffers(rightchannel, recordingLength);

......... For the full code, please log in and click the Download button above.