How to record and play back within the browser (HTML5 audio)


Question


I'm building a simple HTML component that will allow a user to record a message and then immediately play it back. The aim is to let them discard the recording if they don't like it before saving it. The messages will be less than 60 seconds.

So I have a UI that allows me to record audio into an array of AudioBuffers. I'm now trying to push those buffers back into a second audio control so a user can play the recording back.

Here is a codepen.
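
For reference, _appendBuffer just joins the recorded AudioBuffers end to end. The exact code is in the codepen; a minimal sketch of that kind of helper (an illustration, not the original code) looks like this:

function _appendBuffer(context, buffers) {
    // Illustrative only: concatenates an array of AudioBuffers into a single
    // AudioBuffer using the standard Web Audio API (assumed behaviour of _appendBuffer).
    let numberOfChannels = Math.min(...buffers.map(b => b.numberOfChannels));
    let totalLength = buffers.reduce((sum, b) => sum + b.length, 0);

    let result = context.createBuffer(numberOfChannels, totalLength, context.sampleRate);

    // Copy each recorded buffer into the combined buffer, channel by channel.
    let offset = 0;
    for (let buffer of buffers) {
        for (let channel = 0; channel < numberOfChannels; channel++) {
            result.getChannelData(channel).set(buffer.getChannelData(channel), offset);
        }
        offset += buffer.length;
    }
    return result;
}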

This is the core playback function, which I assume is at the heart of the problem. The solution must work within a browser on Android and iOS and must not use third-party libraries (to make your life really difficult :).

function _playback(){
    let context = new AudioContext();
    var dest = context.createMediaStreamDestination();

    let source = context.createBufferSource(); // creates a sound source
    source.buffer = _appendBuffer(context, this._audioBuffer);  

    source.connect(dest);

    let player = document.getElementById("playback");
    player.srcObject = dest.stream;
    player.play();
}

The key issue is that the audio doesn't play back even though the player control shows time progressing.

I also have questions about whether I should use two players (one for recording and one for playback) or whether I can just use a single audio element.


Answer 1:


This one was a bit of a nightmare, so I thought I would post the full script here.

I started with the famous recorder.js but had some problems adapting it to my needs. I ended up doing a major refactor to make the code easier to understand. Unlike recorder.js, this code doesn't use worker threads (which I really didn't need).

/**
 * Allows recording via the device's microphone.
 */

class Recorder
{
    /**
     * Constraints set up for mono.
     * Currently there is no way to modify them.
     * It should be noted that these settings are ignored on most
     * systems and we get stereo at a 44K sample rate regardless of these settings.
     */
    static  _constraints = {
              audio: 
                  {
                        channelCount: 1,
                        mimeType: 'audio/wav',
                        sampleRate: 8192,
                        sampleSize: 8,
                        autoGainControl: true,
                        noiseSuppression: true,
                        echoCancellation: true,
                  }
            };
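
    // Note: most browsers ignore several of these constraints. As an
    // illustration (not part of the original code), you can check what was
    // actually applied once recording has started by inspecting the live track:
    //
    //     stream.getAudioTracks()[0].getSettings();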

    constructor(desiredChannels)
    {
        this._desiredChannels = desiredChannels;
        this._reset();
    }


    /*
     * Start recording.
     * 
     * errorCallback(e) - a function that is called if the start fails.
     * 
     */
    start(errorCallback)
    {
        this._reset();
        this._context = new AudioContext();

        // request permission and if given
        // wire our audio control to the media stream.  
        navigator
            .mediaDevices
            .getUserMedia(Recorder._constraints)
            .then((stream) => this._wireRecordingStream(stream))
            .catch(e => errorCallback(e));

        // TODO: consider giving the user the ability to select an input device.
    }

    /*
     * Stops a currently active recording.
     */
    stop()
    {
        if (this._context != null)
        {
            this._context.close();
            this._context = null;
        }
    }

    /**
     * check if the user's phone supports the media API
     */
    hasGetUserMedia() 
    {
          return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia);
    }

    /**
     * returns a Blob containing a wav file of the recording.  
     */
    getWav()
    {
        if (this._mergedChannelData == null)
            this._mergeChannels();

        let wav = new Wav(this._mergedChannelData, this._actualChannelCount, this._actualSampleRate);

        return wav.getBlob();
    }


    /**
     * resets the Recorder so we can restart the recording.
     */
    _reset()
    {
        this._channels = null;
        this._actualChannelCount = -1;

        // this will be updated when the recording starts to the actual rate.
        this._actualSampleRate = -1;

        // after _mergeChannels is called this will contain
        // a single float32 array of the underlying channel 
        // data interleaved to create a single audio stream.
        this._mergedChannelData = null;

    }


    _initChannelBuffers(actualChannels) 
    {
        if (this._channels == null)
        {
            this._channels = [];
            this._actualChannelCount = actualChannels;

            for (var i = 0; i < actualChannels; i++) 
            {
                this._channels.push(new Channel());
            }
        }
    }


    /**
     * The start() method uses this method to initialise the media stream
     * and wire up the 'onaudioprocess' handler to capture the recording.
     */
    _wireRecordingStream(stream)
    {
        // https://developers.google.com/web/fundamentals/media/recording-audio/

        // Setup recording.
        this._source = this._context.createMediaStreamSource(stream);

        this._node = (this._context.createScriptProcessor || this._context.createJavaScriptNode)
            .call(this._context, 4096, this._desiredChannels, this._desiredChannels); // 4K buffer and we prefer a single (mono) channel.

        // the context may have ignored our preferred sample rate.
        this._actualSampleRate = this._context.sampleRate;

        this._node.onaudioprocess = (e) => this._storeAudio(e.inputBuffer);

        this._source.connect(this._node);
        this._node.connect(this._context.destination);
    }

    /**
     * This is the callback for 'onaudioprocess' where we store the recorded data 
     * to each channel buffer.
     */
    _storeAudio(inputBuffer) 
    {
        this._initChannelBuffers(inputBuffer.numberOfChannels);

        for (var i = 0; i < this._actualChannelCount; i++) 
        {
            this._channels[i].storeAudioPacket(inputBuffer.getChannelData(i));
        }
    }

    // Merges all channels into a single Float32Array.
    // Channels are merged by interleaving data packets from each channel into a single stream.
    _mergeChannels() 
    {
        if (this._actualChannelCount === 2) 
        {
            this._mergedChannelData = this._interleave(this._channels[0], this._channels[1]);
        } 
        else 
        {
            this._mergedChannelData = this._channels[0].getAudioData();
        }
    }

    /**
     * interleaves two channel buffers into a single float32 array.
     */
    _interleave(lhsChannel, rhsChannel) 
    {
        let length = lhsChannel.getLength() + rhsChannel.getLength();
        let result = new Float32Array(length);

        let index = 0;
        let inputIndex = 0;

        let lhsData = lhsChannel.getAudioData();
        let rhsData = rhsChannel.getAudioData();

        while (index < length) 
        {
            result[index++] = lhsData[inputIndex];
            result[index++] = rhsData[inputIndex];
            inputIndex++;
        }
        return result;
    }
}

/**
 * Used to buffer audio data for a single channel.
 */
class Channel
{
    constructor()
    {
        /** 
         * the total number of Float32s stored in all of the audio packets.
         */
        this._length = 0;

        // an array of audio packets (Float32Array) captured as the recording progresses.
        this._audioPackets = [];

        // If _flatten has been called this will be a Float32Array
        // containing all of the combined audio packets as a single array.
        this._flattened = null;
    }

    getLength()
    {
        return this._length;
    }

    /**
     * returns a single audio packet stored at the given index.
     */
    getAudioPacket(index)
    {
        return this._audioPackets[index];
    }

    /**
     * returns the entire underlying data (Float32s) as a single Float32 array.
     * If it hasn't already been done, this method will call _flatten to
     * combine all of the packets into a single data array.
     */
    getAudioData()
    {
        if (this._flattened == null)
            this._flatten();

        return this._flattened;
    }

    // Stores an audioPacket (Float32Array) to _audioPackets
    storeAudioPacket(audioPacket)
    {
        this._audioPackets.push(new Float32Array(audioPacket));
        this._length += audioPacket.length;
    }

    /**
     * coalesce all of the _audioPackets into a single float32Array
     */
    _flatten() 
    {
        this._flattened = new Float32Array(this._length);
        let  offset = 0;
        for (let i = 0; i < this._audioPackets.length; i++) 
        {
            this._flattened.set(this._audioPackets[i], offset);
            offset += this._audioPackets[i].length;
        }
    }
}

/**
 * The logic for creating a wav file (well just the data structure actually) from
 * a stream of audioData
 * 
 * audioData - Float32Array containing the interleaved data from all channels.
 * channelCount - the number of channels interleaved into the audioData
 * sampleRate - the sampleRate of the audioData.
 */
class Wav
{
    /**
     * expects a single float32array from which it will create a wav file.
     */
    constructor(audioData, channelCount, sampleRate)
    {
        this._audioData = audioData;
        this._channelCount = channelCount;
        this._sampleRate = sampleRate;
    }

    /**
     * returns the wav file as a blob.
     */
    getBlob()
    {
        let wav = this._encodeAsWAV();
        let audioBlob = new Blob([wav], { type: "audio/wav" });

        return audioBlob;
    }

    /**
     * Encodes _audioData into a wav file by adding the 
     * standard wav header.
     */
    _encodeAsWAV() 
    {
        let audioData = this._audioData;

        var wavBuffer = new ArrayBuffer(44 + audioData.length * 2);
        var view = new DataView(wavBuffer);

        /* RIFF identifier */
        this._writeString(view, 0, 'RIFF');
        /* RIFF chunk length */
        view.setUint32(4, 36 + audioData.length * 2, true);
        /* RIFF type */
        this._writeString(view, 8, 'WAVE');
        /* format chunk identifier */
        this._writeString(view, 12, 'fmt ');
        /* format chunk length */
        view.setUint32(16, 16, true);
        /* sample format (raw) */
        view.setUint16(20, 1, true);
        /* channel count */
        view.setUint16(22, this._channelCount, true);
        /* sample rate */
        view.setUint32(24, this._sampleRate, true);
        /* byte rate (sample rate * block align) */
        view.setUint32(28, this._sampleRate * this._channelCount * 2, true);
        /* block align (channel count * bytes per sample) */
        view.setUint16(32, this._channelCount * 2, true);
        /* bits per sample */
        view.setUint16(34, 16, true);
        /* data chunk identifier */
        this._writeString(view, 36, 'data');
        /* data chunk length */
        view.setUint32(40, audioData.length * 2, true);

        this._floatTo16BitPCM(view, 44, audioData);

        return view;
    }

    _floatTo16BitPCM(output, offset, input) 
    {
        for (var i = 0; i < input.length; i++, offset += 2) 
        {
            var s = Math.max(-1, Math.min(1, input[i]));
            output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
    }

    _writeString(view, offset, string) 
    {
        for (var i = 0; i < string.length; i++) 
        {
            view.setUint8(offset + i, string.charCodeAt(i));
        }
    }
}
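
To tie this back to the original question: getWav() returns an ordinary wav Blob, so playback only needs an object URL on a plain <audio> element, which suggests a single player is enough (the recording side never touches an audio element at all). The wiring below is only a sketch; the element id, button handlers and error handling are assumptions rather than part of the answer above.

// Illustrative wiring only - the element id and the points at which the
// recording is started/stopped are assumptions, not part of the classes above.
let recorder = new Recorder(1); // ask for mono

function startRecording()
{
    if (!recorder.hasGetUserMedia())
    {
        console.error("getUserMedia is not supported in this browser.");
        return;
    }

    recorder.start((e) => console.error("unable to start recording", e));
}

function stopAndPlay()
{
    recorder.stop();

    // getWav() returns a Blob containing a standard wav file, so it can be
    // handed straight to an <audio> element via an object URL.
    let player = document.getElementById("playback"); // assumed <audio id="playback" controls>
    player.src = URL.createObjectURL(recorder.getWav());
    player.play();
}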


Source: https://stackoverflow.com/questions/56948350/how-to-record-and-playback-within-browser-html-5-audio
