Web front-end real-time audio playback and waveform drawing

Preface

A recent project required pulling a real-time audio stream, playing it, and displaying the corresponding waveform. For an audio file we could simply use the wavesurfer.js library to draw the waveform, but how do we handle a real-time stream? Below is my solution.

1. The backend pushes the stream over WebSocket

The backend needs to convert the audio data into PCM format before pushing it, as sketched below.
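The backend itself is not the focus of this article, but for reference here is a minimal Node.js sketch of this step, assuming the ws npm package and an ffmpeg binary are available; the port and RTMP address are placeholders. It decodes the RTMP audio into raw signed 16-bit mono PCM at 32000 Hz (matching the PCMPlayer options used further down) and forwards each chunk to the connected WebSocket client as a binary frame.

// backend-pcm-push.js — a minimal sketch, not the project's actual backend
// Assumptions: Node.js, the "ws" npm package, and ffmpeg available on PATH
const { spawn } = require('child_process')
const WebSocket = require('ws')

const RTMP_URL = 'rtmp://example.com/live/stream' // placeholder source address
const wss = new WebSocket.Server({ port: 8877 })

wss.on('connection', (client) => {
  // decode the RTMP audio into raw PCM: signed 16-bit little-endian, mono, 32000 Hz
  const ffmpeg = spawn('ffmpeg', [
    '-i', RTMP_URL,
    '-vn', // drop the video track
    '-f', 's16le',
    '-acodec', 'pcm_s16le',
    '-ac', '1',
    '-ar', '32000',
    'pipe:1',
  ])

  // forward each PCM chunk to the client as a binary frame
  ffmpeg.stdout.on('data', (chunk) => {
    if (client.readyState === WebSocket.OPEN) client.send(chunk)
  })

  client.on('close', () => ffmpeg.kill('SIGKILL'))
})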

2. Front-end implementation

We use the following two libraries:
1. pcm-player (plays the sound)
2. recorder-core (draws the waveform)

Install:

npm install pcm-player
npm install recorder-core

1. First, we encapsulate the WebSocket connection (there are plenty of tutorials online), so let's go straight to the code:

var ws = null
let lockReconnect = false
/**
 * @param {*} path ws url
 * @param {*} callback data-handling callback
 * @param {*} isHandleData whether to parse the incoming data
 * @returns ws
 */
const websocket = (path, callback, isHandleData = false) => {
    cancel() // close the previous connection, if any
    ws = new WebSocket(path)
    ws.binaryType = 'arraybuffer'
    // connection established
    ws.onopen = (event) => {
        console.log('websocket connected')
        // connection closed
        ws.onclose = (event) => {
            console.log('websocket disconnected, reconnecting')
            reconnect(path, callback, isHandleData)
        }
        // message received
        ws.onmessage = (event) => {
            const data = handleData(event, isHandleData)
            if (callback) callback(data)
        }
    }
    ws.onerror = (event) => {
        console.log('websocket connection failed, reconnecting')
        reconnect(path, callback, isHandleData)
    }
    return ws
}

// reconnect after a short delay
const reconnect = (path, callback, isHandleData) => {
    if (lockReconnect) {
        return
    }
    lockReconnect = true
    setTimeout(function () {
        console.log('reconnecting…')
        lockReconnect = false
        websocket(path, callback, isHandleData)
    }, 2000)
}

// handle incoming data
const handleData = (event, isHandleData) => {
    const data = isHandleData ? JSON.parse(event.data) : event
    return data
}

// close the connection and clear the ws instance
const cancel = () => {
    if (ws) {
        ws.onclose = () => {} // suppress the reconnect that onclose would trigger
        ws.close()
    }
    ws = null
}

export default websocket
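
For reference, here is a minimal usage sketch of this helper on its own (the URL is a placeholder). Because isHandleData defaults to false, the callback receives the raw message event, and the binary PCM payload pushed by the backend is available as an ArrayBuffer on event.data:

import websocket from './websocket'

// the callback receives the raw MessageEvent; event.data holds the ArrayBuffer
const ws = websocket('ws://localhost:8877/live', (event) => {
  const pcmBytes = new Uint8Array(event.data)
  console.log('received ' + pcmBytes.length + ' bytes of PCM')
})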

2. Complete code used in a Vue component

<template>
  <div id="wave_audio"></div>
</template>

<script>
import Recorder from 'recorder-core'
import PCMPlayer from 'pcm-player'
// load all the js files of the audio-format encoding engines we need
import 'recorder-core/src/engine/mp3'
import 'recorder-core/src/engine/mp3-engine'
// the three imports above can also be replaced by the pre-built recorder.xxx.min.js,
// e.g. import Recorder from 'recorder-core/recorder.mp3.min' // already bundles recorder-core and mp3 support
// optional extension
import 'recorder-core/src/extensions/wavesurfer.view'
import websocket from './websocket'
var player = null
var wave = null
export default {
  mounted() {
    this.initPlay()
    this.initWave()
    this.initWebsocket(
      'ws://192.168.8.210:8877/live?url=rtmp://139.224.194.14:10085/hls/wrpYcD27g?sign=wrtY5D27gz&&&ffmpeg=true'
    )
  },
  methods: {
    initPlay() {
      player = new PCMPlayer({
        encoding: '16bitInt', // encoding; possible values: 8bitInt / 16bitInt / 32bitInt / 32bitFloat, default 16bitInt
        channels: 1, // number of channels in the PCM data
        sampleRate: 32000, // sample rate of the PCM data
        flushTime: 2000, // flush interval (ms) for the buffered PCM data, default 1000ms
      })
    },
    initWave() {
      const waveOption = {
        elem: '#wave_audio',
        scale: 2, // scaling factor; should be a positive integer. Drawing at 2x width/height avoids blurring on mobile
        fps: 50, // drawing frame rate; should not be too high. 50-60fps is visibly smooth for motion; falling short of this value has little impact
        duration: 3500, // maximum duration (ms) of waveform drawn inside the view window; this determines the scroll speed
        direction: 1, // direction the waveform advances: 1 = left to right, -1 = right to left
        position: 0, // drawing position, -1 to 1: -1 is the bottom, 0 is the middle, 1 is the top; fractions are percentages
        centerHeight: 1, // base thickness of the center line; 0 disables it, and it should be 0 when position = ±1
        // waveform color configuration: [position, css color, ...]; position is a value between 0.0 and 1.0
        linear: [0, 'rgba(14, 224, 238, 1)', 1, 'rgba(14, 224, 238, .6)'],
        centerColor: 'rgba(14, 224, 238, 1)', // css color of the center line; leave empty to use the waveform's first gradient color
      }
      wave = Recorder.WaveSurferView(waveOption)
    },
    initWebsocket(url) {
      websocket(url, this.handle)
    },
    handle(event) {
      const dataAudio = new Uint8Array(event.data)
      player && player.feed(dataAudio) // play the sound
      const data = new Int16Array(event.data) // 16-bit PCM samples are signed, so view them as Int16Array
      wave && wave.input(data, 20, 32000) // feed waveform data (pcm, powerLevel, sampleRate)
    },
    destroyPlay() {
      player && player.destroy()
      player = null
    },
  },
  beforeDestroy() {
    this.destroyPlay()
  },
}
</script>

<style lang="less" scoped>
#wave_audio {
  width: 100%;
  height: 100%;
}
</style>

Origin blog.csdn.net/weixin_45820720/article/details/129057254