<!--
  Custom waveform rendering built on wavesurfer.js, integrating the
  spectrogram, timeline, zoom and region-selection plugins.
  Reference: the wavesurfer.js official site (https://wavesurfer.xyz/)
-->
<template>
  <div>
    <div>
      <!-- Waveform canvas container (wavesurfer renders into this ref) -->
      <div ref="waveform" class="waveform"></div>
      <div v-if="loading" class="loading-indicator">
        音频加载中... {{ progress }}%
        <div class="progress-bar">
          <div class="progress-fill" :style="{ width: progress + '%' }"></div>
        </div>
      </div>
      <div v-if="error" class="error-message">{{ error }}</div>
    </div>
    <!-- Zoom hints: only visible for privileged roles (canZoom) -->
    <div v-if="canZoom" style="display: inline">
      <span class="zoom-tips">Tips:鼠标滚动时,可以放大或缩小波形; </span>
      <span v-if="zoomLevel > 0"> 当前缩放级别:{{ zoomLevel }}(px/s); </span>
      <a-popover title="说明:">
        <template slot="content">
          <span style="color: #6a5acd"
            >1、鼠标滚轮,可以放大或缩小波形!<br />2、点击选区,可以播放该区域音频!<br />3、拖动选区边缘,可以调整选区大小!<br />4、拖动选区中间,可以移动选区位置!<br />5、移动【吸气相】和【呼气相】选区后,可以重新计算特征值!</span
          >
          <p />
          <!-- FIX: original markup was <img:src="…"style="…"/> — no whitespace
               between tag name and attributes, which parses as a bogus
               <img:src> element and never renders the image. -->
          <img
            :src="'http://116.162.220.48:8888/image/ls.png'"
            style="max-width: 500px; font-size: 12px; font-style: italic"
          />
        </template>
        <a-icon type="question-circle" style="font-size: 16px; margin-left: 10px" />
      </a-popover>
    </div>
    <div style="display: inline; margin-left: 20px">
      <a-button type="dashed" icon="save" @click="downloadAudio" :disabled="loading">下载音频</a-button>
    </div>
    <!-- Inhale/exhale region read-out plus the "recompute features" action -->
    <div v-if="inhaleRegion || exhaleRegion" class="region-container">
      <div class="region-text">
        <span v-if="inhaleRegion" class="region-item">
          <span style="color: rgba(180, 84, 240, 0.5)">吸气相区域:</span>{{ formatTimeNumber(inhaleRegion.start) }}秒 ~
          {{ formatTimeNumber(inhaleRegion.end) }}秒 ({{ formatTimeNumber(inhaleRegion.end - inhaleRegion.start) }}秒)
        </span>
        <span v-if="exhaleRegion" class="region-item">
          <span style="color: rgba(34, 139, 34, 0.5)">呼气相区域:</span>{{ formatTimeNumber(exhaleRegion.start) }}秒 ~
          {{ formatTimeNumber(exhaleRegion.end) }}秒 ({{ formatTimeNumber(exhaleRegion.end - exhaleRegion.start) }}秒)
        </span>
      </div>
      <a-popconfirm title="确定重新计算特征值吗,会覆盖旧值?" @confirm="regionUpdate">
        <a-button type="primary" :disabled="!inhaleRegion || !exhaleRegion"> 计算特征值 </a-button>
      </a-popconfirm>
    </div>
  </div>
</template><script>
import Vue from 'vue'
import { ROLE_ID } from '@/store/mutation-types'
import axios from 'axios'
import WaveSurfer from 'wavesurfer.js'
import Spectrogram from 'wavesurfer.js/dist/plugins/spectrogram.esm.js'
import TimelinePlugin from 'wavesurfer.js/dist/plugins/timeline.esm.js'
import ZoomPlugin from 'wavesurfer.js/dist/plugins/zoom.esm.js'
import RegionsPlugin from 'wavesurfer.js/dist/plugins/regions.esm.js'
import { downloadAsWav } from '@/utils/util'

/**
 * MyWaveSurfer — audio waveform/spectrogram viewer for breath-sound analysis.
 *
 * Loads a full WAV recording (8000 Hz, 16-bit, mono — these parameters are
 * hard-coded throughout), optionally narrows it to a byte-indexed segment,
 * lets privileged roles zoom and drag inhale/exhale regions, and posts the
 * selected PCM slices to a backend service to recompute feature values.
 *
 * Emits: 'charc-update' with the recomputed characteristic values.
 */
export default {
  name: 'MyWaveSurfer',
  props: {
    audioUrl: String, // URL of the full audio (WAV) file
    indexBean: Object, // carries startIndex/endIndex byte offsets and positionFileName
  },
  data() {
    return {
      wavesurfer: null,
      spectrogramPlugin: null,
      timelinePlugin: null,
      zoomPlugin: null,
      regionsPlugin: null,
      audioBlob: null, // raw full-audio Blob
      segmentBlob: null, // Blob of the currently loaded audio segment
      loading: false,
      error: null,
      progress: 0,
      currentSegmentBlobUrl: null, // object URL of the current segment (revoked on replace/destroy)
      fullAudioBlobUrl: null, // object URL of the full audio (revoked on reload/destroy)
      zoomLevel: 110, // default pixels rendered per second of audio
      canZoom: false, // zoom/region UI enabled only for specific roles
      inhaleRegion: null, // inhale-phase region (wavesurfer Region object)
      exhaleRegion: null, // exhale-phase region (wavesurfer Region object)
    }
  },
  watch: {
    audioUrl: {
      // immediate: true, // intentionally off: wavesurfer must be initialized first
      async handler(newVal) {
        await this.loadFullAudio(newVal)
      },
    },
    indexBean: {
      deep: true,
      handler(newVal) {
        if (this.audioBlob) {
          this.loadAudioSegment(newVal)
        }
      },
    },
  },
  async mounted() {
    // Roles 11 and 15 get the zoom + region-editing UI.
    // NOTE(review): loose == kept on purpose — Vue.ls may return the id as a string.
    const roleId = Vue.ls.get(ROLE_ID)
    this.canZoom = roleId == 11 || roleId == 15
    console.log(
      'MyWaveSurfer',
      'audioUrl=',
      this.audioUrl,
      'indexBean=',
      this.indexBean.positionFileName,
      'roleId=',
      roleId,
      'canZoom=',
      this.canZoom
    )
    await this.initWaveSurfer()
  },
  beforeDestroy() {
    this.cleanup()
  },
  methods: {
    /**
     * Download either the current segment (when a specific auscultation
     * position is selected) or the full recording as a WAV file.
     */
    async downloadAudio() {
      console.log('下载音频', this.indexBean.positionFileName, this.fullAudioBlobUrl)
      if (this.currentSegmentBlobUrl && this.indexBean.positionFileName !== '全部') {
        downloadAsWav(this.currentSegmentBlobUrl, this.indexBean.positionFileName || 'audio.wav')
      } else if (this.fullAudioBlobUrl) {
        downloadAsWav(this.fullAudioBlobUrl, 'raw.wav')
      } else {
        console.error('没有可用的音频数据进行下载')
      }
    },
    /**
     * Wrap raw PCM data in a 44-byte WAV (RIFF) header.
     * @param {ArrayBuffer} pcmData - raw PCM audio data
     * @param {number} sampleRate - sample rate (default 8000)
     * @param {number} numChannels - channel count (default 1)
     * @param {number} bitsPerSample - bit depth (default 16)
     * @returns {Blob} WAV-formatted Blob
     */
    pcmToWav(pcmData, sampleRate = 8000, numChannels = 1, bitsPerSample = 16) {
      const bytesPerSample = bitsPerSample / 8
      const blockAlign = numChannels * bytesPerSample
      const byteRate = sampleRate * blockAlign
      const dataSize = pcmData.byteLength
      // 44-byte WAV header followed by the PCM payload
      const buffer = new ArrayBuffer(44 + dataSize)
      const view = new DataView(buffer)
      // RIFF chunk
      this.writeString(view, 0, 'RIFF')
      view.setUint32(4, 36 + dataSize, true) // total file size - 8
      this.writeString(view, 8, 'WAVE')
      // fmt sub-chunk
      this.writeString(view, 12, 'fmt ')
      view.setUint32(16, 16, true) // fmt chunk size
      view.setUint16(20, 1, true) // audio format 1 = PCM
      view.setUint16(22, numChannels, true)
      view.setUint32(24, sampleRate, true)
      view.setUint32(28, byteRate, true)
      view.setUint16(32, blockAlign, true)
      view.setUint16(34, bitsPerSample, true)
      // data sub-chunk
      this.writeString(view, 36, 'data')
      view.setUint32(40, dataSize, true)
      // copy PCM payload after the header
      new Uint8Array(buffer, 44).set(new Uint8Array(pcmData))
      return new Blob([buffer], { type: 'audio/wav' })
    },
    // Helper: write an ASCII string into a DataView at the given offset.
    writeString(view, offset, string) {
      for (let i = 0; i < string.length; i++) {
        view.setUint8(offset + i, string.charCodeAt(i))
      }
    },
    /**
     * Create the WaveSurfer instance with a custom ECG-paper style renderer
     * (5 mm grid, voltage-scaled trace) and wire up its lifecycle events.
     */
    async initWaveSurfer() {
      console.log('初始化 WaveSurfer', this.zoomLevel)
      this.wavesurfer = WaveSurfer.create({
        container: this.$refs.waveform,
        waveColor: '#48a1e0',
        progressColor: '#25ebd7',
        cursorColor: '#333',
        height: 150,
        sampleRate: 8000,
        // minPxPerSec: this.zoomLevel,
        // dragToSeek: true,
        // fillParent: true,
        mediaControls: true,
        // Custom renderer: grid background + down-sampled voltage trace.
        renderFunction: (channels, ctx) => {
          const width = ctx.canvas.width
          const height = ctx.canvas.height
          const centerY = height / 2
          const maxMillimeter = 5 * 5 // 25 grid cells vertically (matches legacy logic)
          const zoom = height / maxMillimeter
          const gain = this.gain || 10 // NOTE(review): this.gain is not declared in data() — confirm source
          const STEP_SIZE = 50 // draw every 50th sample

          // Step 1: clear canvas and draw the grid background
          ctx.clearRect(0, 0, width, height)
          const drawGrid = (ctx, step) => {
            ctx.strokeStyle = '#ccc'
            ctx.lineWidth = 1
            const rowSpace = (height / maxMillimeter) * step
            // vertical lines (time axis)
            for (let x = 0; x * rowSpace <= width; x++) {
              ctx.beginPath()
              ctx.moveTo(x * rowSpace, 0)
              ctx.lineTo(x * rowSpace, height)
              ctx.stroke()
            }
            // horizontal lines
            for (let y = 0; y <= maxMillimeter; y += step) {
              const yPos = y * (height / maxMillimeter)
              ctx.beginPath()
              ctx.moveTo(0, yPos)
              ctx.lineTo(width, yPos)
              ctx.stroke()
            }
          }
          // drawGrid(ctx, 1) // fine 1 mm grid intentionally disabled
          ctx.lineWidth = 2
          drawGrid(ctx, 5) // bold 5 mm grid

          // Step 2: first (or only) channel, Float32 in [-1, 1]
          if (!channels || channels.length === 0) return
          const floatData = channels[0]
          // map [-1, 1] -> Int16 range [-32768, 32767]
          const data = new Int16Array(floatData.length)
          for (let i = 0; i < floatData.length; i++) {
            data[i] = Math.max(-32768, Math.min(32767, Math.floor(floatData[i] * 32768)))
          }

          // Step 3: sample value -> millivolts (3.3 V reference over int16 range)
          const dataSize = data.length
          const calcRealMv = (point) => (point * 3.3) / 32767

          // Step 4: draw the trace, clamped to the canvas
          ctx.beginPath()
          ctx.strokeStyle = '#48a1e0'
          ctx.lineWidth = 1
          let started = false
          for (let i = 0; i < dataSize; i += STEP_SIZE) {
            const voltage = calcRealMv(0 - data[i]) // negate: canvas y grows downward
            let y = Math.floor(voltage * gain * zoom + centerY)
            y = Math.max(0, Math.min(y, height))
            const x = (i / dataSize) * width
            if (!started) {
              ctx.moveTo(x, y)
              started = true
            } else {
              ctx.lineTo(x, y)
            }
          }
          ctx.stroke()
        },
      })
      // track explicit user interaction (seek clicks etc.)
      this.wavesurfer.on('interaction', () => {
        this.isUserInteraction = true
      })
      this.wavesurfer.on('timeupdate', () => {
        if (this.isUserInteraction) {
          this.isUserInteraction = false // reset the flag after a user-driven update
        }
      })
      // On every load: tear down old plugins, pick an initial zoom, re-register.
      this.wavesurfer.on('ready', () => {
        if (this.spectrogramPlugin) {
          this.spectrogramPlugin.destroy() // drop stale spectrogram data
        }
        if (this.timelinePlugin) {
          this.timelinePlugin.destroy()
        }
        if (this.zoomPlugin) {
          this.zoomPlugin.destroy()
        }
        if (this.regionsPlugin) {
          this.regionsPlugin.destroy()
        }
        const duration = this.wavesurfer.getDuration()
        // FIX: guard the divisor — audio shorter than 0.5 s rounded to 0 and
        // produced an Infinity zoom level.
        this.zoomLevel = Math.round(1100 / Math.max(1, Math.round(duration))) // fit ~1100 px width
        console.log('音频加载完成,时长:', duration, '秒', 'px', '初始缩放级别:', this.zoomLevel, 'px/s')
        if (this.zoomLevel < 110) {
          this.zoomLevel = 110
        }
        this.inhaleRegion = null
        this.exhaleRegion = null
        this.registerWavePlugin()
      })
      this.wavesurfer.on('zoom', (currentZoom) => {
        this.zoomLevel = Math.round(currentZoom)
      })
      this.wavesurfer.on('finish', () => {
        this.wavesurfer.setTime(0)
      })
    },
    // Register the spectrogram plugin; zoom/timeline/regions only for privileged roles.
    registerWavePlugin() {
      this.spectrogramPlugin = this.wavesurfer.registerPlugin(
        Spectrogram.create({
          // container: this.$refs.spectrogram,
          height: 150,
          labels: false,
          windowFunc: 'hann',
        })
      )
      if (this.canZoom) {
        this.registerOtherPlugins()
      }
    },
    // Register timeline, wheel-zoom and the inhale/exhale region plugins.
    registerOtherPlugins() {
      this.timelinePlugin = this.wavesurfer.registerPlugin(
        TimelinePlugin.create({
          height: 15,
          timeInterval: 0.1, // tick spacing in seconds
          primaryLabelInterval: 2, // primary label every 2 s
          secondaryLabelInterval: 1, // secondary label every 1 s
          style: {
            fontSize: '12px',
            color: '#666',
          },
        })
      )
      this.zoomPlugin = this.wavesurfer.registerPlugin(
        ZoomPlugin.create({
          scale: 0.01, // zoom factor per wheel notch
          maxZoom: 200, // max 200 px/s
          minZoom: 10, // min 10 px/s
        })
      )
      this.wavesurfer.zoom(this.zoomLevel)
      this.regionsPlugin = this.wavesurfer.registerPlugin(RegionsPlugin.create())
      this.regionsPlugin.addRegion({
        id: 'inhale',
        start: 0,
        end: 0.3,
        content: '吸气相',
        minLength: 0.3,
        maxLength: 10,
        color: 'rgba(180, 84, 240, 0.3)',
      })
      this.regionsPlugin.addRegion({
        id: 'exhale',
        start: 1,
        end: 1.3,
        content: '呼气相',
        minLength: 0.3,
        maxLength: 10,
        color: 'rgba(34,139,34, 0.3)',
      })
      // clicking a region plays just that slice
      this.regionsPlugin.on('region-clicked', (region, e) => {
        e.stopPropagation() // don't also seek the waveform
        region.play(true)
      })
      // dragging/resizing a region updates the bound inhale/exhale state
      this.regionsPlugin.on('region-updated', (region) => {
        console.log('Updated region', region.id, region.start, region.end)
        if (region.id === 'inhale') {
          this.inhaleRegion = region
        } else if (region.id === 'exhale') {
          this.exhaleRegion = region
        } else {
          this.inhaleRegion = null
          this.exhaleRegion = null
        }
      })
    },
    // Integers are printed as-is; fractions are rounded to 2 decimal places.
    formatTimeNumber(num) {
      if (Number.isInteger(num)) {
        return num.toString()
      } else {
        return num.toFixed(2)
      }
    },
    /**
     * Fetch the full recording, keep its Blob/object URL, and load it
     * into wavesurfer.
     */
    async loadFullAudio(url) {
      this.loading = true
      if (!url || url.trim() === '') {
        this.error = '音频URL未提供或为空'
        console.error('音频URL未提供或为空(wavUrl is null or empty)')
        this.loading = false
        return
      }
      try {
        const response = await fetch(url)
        this.audioBlob = await response.blob()
        // FIX: revoke the previous object URL before replacing it — each
        // reload used to leak the old blob URL.
        if (this.fullAudioBlobUrl) {
          URL.revokeObjectURL(this.fullAudioBlobUrl)
        }
        this.fullAudioBlobUrl = URL.createObjectURL(this.audioBlob)
        await this.wavesurfer.loadBlob(this.audioBlob)
        this.error = null
      } catch (err) {
        console.log('加载音频失败:', err)
        this.error = '音频加载失败: ' + err.message
      } finally {
        this.loading = false
      }
    },
    /**
     * Confirm handler for "recompute features": extract the inhale/exhale PCM
     * slices and post them to the algorithm service; on success emits the new
     * characteristic values via updateCharcteristic().
     */
    async regionUpdate() {
      if (this.inhaleRegion && this.exhaleRegion) {
        console.log('indexBean', this.indexBean.positionFileName)
        let inArrayBuffer
        let exArrayBuffer
        if (this.segmentBlob && this.indexBean.positionFileName !== '全部') {
          // segment blobs are raw PCM slices carrying their own WAV header
          inArrayBuffer = await this.extractAudioBufferByTime(
            this.segmentBlob,
            false,
            this.inhaleRegion.start,
            this.inhaleRegion.end
          )
          exArrayBuffer = await this.extractAudioBufferByTime(
            this.segmentBlob,
            false,
            this.exhaleRegion.start,
            this.exhaleRegion.end
          )
        } else {
          // full recording: skip the 44-byte WAV header first
          inArrayBuffer = await this.extractAudioBufferByTime(
            this.audioBlob,
            true,
            this.inhaleRegion.start,
            this.inhaleRegion.end
          )
          exArrayBuffer = await this.extractAudioBufferByTime(
            this.audioBlob,
            true,
            this.exhaleRegion.start,
            this.exhaleRegion.end
          )
        }
        this.loading = true
        const formData = new FormData()
        formData.append('inhale', new Blob([inArrayBuffer], { type: 'application/octet-stream' }))
        formData.append('exhale', new Blob([exArrayBuffer], { type: 'application/octet-stream' }))
        axios
          .post('http://58.57.172.66:8082/aimed/api/algorithm/calculatePower', formData, {
            headers: {
              'Content-Type': 'multipart/form-data',
            },
          })
          .then((response) => {
            this.loading = false
            const newCharc = response.data.data
            if (response.status !== 200) {
              this.$message.warning('计算失败:' + response.data.msg)
              return
            }
            if (!newCharc) {
              this.$message.warning('计算失败,返回结果为空!')
              return
            }
            console.log('计算成功', newCharc)
            this.updateCharcteristic(newCharc)
          })
          .catch((error) => {
            this.loading = false
            this.$message.warning('计算失败,请重试!')
            console.error('计算失败', error)
          })
      }
    },
    // Merge the backend power ratios with locally derived durations/rate
    // and notify the parent. (Name kept as-is: part of the public interface.)
    updateCharcteristic(data) {
      const inTimeL = this.inhaleRegion.end - this.inhaleRegion.start
      const exTimeL = this.exhaleRegion.end - this.exhaleRegion.start
      const hxl = 60 / (inTimeL + exTimeL) // breaths per minute from one cycle
      this.$emit('charc-update', {
        AVG_INHALE_DURATION: inTimeL.toFixed(2),
        AVG_EXHALE_DURATION: exTimeL.toFixed(2),
        BREATHING_RATE: hxl.toFixed(2),
        LOW_BAND_POWER_RATIO: data.LOW_BAND_POWER_RATIO,
        MID_BAND_POWER_RATIO: data.MID_BAND_POWER_RATIO,
      })
    },
    /**
     * Extract a time range of PCM data from a Blob (16-bit mono @ 8000 Hz).
     * @param {Blob} blob - audio Blob
     * @param {boolean} hasHeader - true when the blob starts with a 44-byte WAV header to skip
     * @param {number} startTime - start time in seconds
     * @param {number} endTime - end time in seconds
     * @returns {Promise<ArrayBuffer>} the extracted raw PCM bytes
     * @throws {Error} when the blob is missing, malformed, or the range is out of bounds
     */
    async extractAudioBufferByTime(blob, hasHeader, startTime, endTime) {
      if (!blob) {
        console.error('音频 Blob 未提供', hasHeader)
        throw new Error('音频 Blob 未提供')
      }
      const fullArrayBuffer = await blob.arrayBuffer()
      // fixed audio parameters used across this component
      const sampleRate = 8000
      const bitsPerSample = 16
      const channels = 1
      const bytesPerSample = bitsPerSample / 8
      const bytesPerSecond = sampleRate * bytesPerSample * channels
      let audioData = fullArrayBuffer
      if (hasHeader) {
        const headerOffset = 44
        if (fullArrayBuffer.byteLength < headerOffset) {
          throw new Error('Invalid WAV file: too small to have a header')
        }
        audioData = fullArrayBuffer.slice(headerOffset)
      }
      // FIX: align offsets to whole samples — a plain Math.floor of
      // seconds*bytesPerSecond could yield an odd byte offset, shifting every
      // 16-bit sample in the slice by one byte and garbling the data.
      const blockAlign = bytesPerSample * channels
      const startByte = Math.floor((startTime * bytesPerSecond) / blockAlign) * blockAlign
      const endByte = Math.floor((endTime * bytesPerSecond) / blockAlign) * blockAlign
      if (startByte < 0 || endByte > audioData.byteLength || startByte >= endByte) {
        throw new Error('Time range out of bounds')
      }
      return audioData.slice(startByte, endByte)
    },
    /**
     * Load the audio segment for the selected auscultation position, or fall
     * back to the full recording when no index range is provided.
     */
    async loadAudioSegment(segment) {
      // NOTE(review): a startIndex of 0 is falsy — a segment {0, 0} falls back
      // to the full audio; confirm that is the intended sentinel.
      if (!segment || (!segment.startIndex && !segment.endIndex)) {
        console.log('回退到完整音频')
        await this.wavesurfer.loadBlob(this.audioBlob)
        return
      }
      try {
        this.loading = true
        const arrayBuffer = await this.audioBlob.arrayBuffer()
        // NOTE(review): extractWavSegment also strips 44 bytes internally, so
        // the +44 here offsets the slice by 88 bytes from file start —
        // presumably indices are relative to the data chunk; confirm.
        const segmentData = this.extractWavSegment(arrayBuffer, segment.startIndex + 44, segment.endIndex + 44)
        const blob = new Blob([segmentData], { type: 'audio/wav' })
        const segmentUrl = URL.createObjectURL(blob)
        this.segmentBlob = blob
        await this.wavesurfer.loadBlob(blob)
        // release the previous object URL before keeping the new one
        if (this.currentSegmentBlobUrl) {
          URL.revokeObjectURL(this.currentSegmentBlobUrl)
        }
        this.currentSegmentBlobUrl = segmentUrl
      } catch (err) {
        this.error = '片段加载失败: ' + err.message
      } finally {
        this.loading = false
      }
    },
    /**
     * Build a standalone WAV from a byte range of an existing WAV buffer:
     * reuse the original 44-byte header with patched sizes, then append the
     * sliced data chunk. start/end are offsets into the data chunk.
     */
    extractWavSegment(arrayBuffer, start, end) {
      const header = arrayBuffer.slice(0, 44)
      const data = arrayBuffer.slice(44)
      const headerView = new DataView(header)
      const newData = data.slice(start, end)
      // patch the RIFF sizes to match the new payload
      headerView.setUint32(4, 36 + newData.byteLength, true) // ChunkSize
      headerView.setUint32(40, newData.byteLength, true) // Subchunk2Size
      const newWav = new Uint8Array(header.byteLength + newData.byteLength)
      newWav.set(new Uint8Array(header), 0)
      newWav.set(new Uint8Array(newData), header.byteLength)
      return newWav.buffer
    },
    // Stop playback, blank the waveform and reset transient UI state.
    resetComponentState() {
      this.stopPlay()
      if (this.wavesurfer) {
        this.wavesurfer.empty()
      }
      this.progress = 0
      this.error = null
      this.isUserInteraction = false
      // Optionally re-create WaveSurfer from scratch:
      // this.cleanup()
      // this.initWaveSurfer()
    },
    // Release object URLs and destroy the wavesurfer instance.
    cleanup() {
      if (this.currentSegmentBlobUrl) {
        URL.revokeObjectURL(this.currentSegmentBlobUrl)
      }
      // FIX: the full-audio object URL was never revoked — leaked the blob
      // for the lifetime of the page.
      if (this.fullAudioBlobUrl) {
        URL.revokeObjectURL(this.fullAudioBlobUrl)
      }
      if (this.wavesurfer) {
        this.wavesurfer.destroy()
      }
    },
    stopPlay() {
      if (this.wavesurfer) {
        this.wavesurfer.pause()
      }
    },
    /**
     * Wrap PCM data in a WAV header and return the raw ArrayBuffer.
     * NOTE(review): duplicates pcmToWav with fixed 8000 Hz/mono/16-bit
     * parameters but returns an ArrayBuffer instead of a Blob; kept for
     * external callers — consider consolidating.
     */
    convertPcmToWav(pcmData) {
      const sampleRate = 8000
      const numChannels = 1
      const bitsPerSample = 16
      const byteRate = (sampleRate * numChannels * bitsPerSample) / 8
      const blockAlign = (numChannels * bitsPerSample) / 8
      const dataLength = pcmData.byteLength
      const buffer = new ArrayBuffer(44 + dataLength)
      const view = new DataView(buffer)
      this.writeString(view, 0, 'RIFF')
      view.setUint32(4, 36 + dataLength, true)
      this.writeString(view, 8, 'WAVE')
      this.writeString(view, 12, 'fmt ')
      view.setUint32(16, 16, true)
      view.setUint16(20, 1, true)
      view.setUint16(22, numChannels, true)
      view.setUint32(24, sampleRate, true)
      view.setUint32(28, byteRate, true)
      view.setUint16(32, blockAlign, true)
      view.setUint16(34, bitsPerSample, true)
      this.writeString(view, 36, 'data')
      view.setUint32(40, dataLength, true)
      new Uint8Array(buffer, 44).set(new Uint8Array(pcmData))
      return buffer
    },
    // FIX: a second, identical `writeString` method was declared here — a
    // duplicate object key that silently overrode the first; removed.
  },
}
</script><style>
/* Zoom hint line: red hint text with black emphasised spans.
   NOTE(review): the nested `span { ... }` selector requires native CSS
   nesting (modern browsers) or a preprocessor — confirm the build setup. */
.zoom-tips {
  margin: 18px 0;
  font-size: 16px;
  color: #ff0000;
  span {
    color: #000;
  }
}
/* Waveform container: fixed design width matching the 1100 px zoom math */
.waveform {
  width: 1100px;
  /* overflow: hidden;position: relative;max-width: 100% !important; */
}
/* Centered overlay shown while the audio is being fetched/decoded */
.loading-indicator {
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  color: #333;
  background: rgba(255, 255, 255, 0.9);
  padding: 10px 20px;
  border-radius: 8px;
  font-size: 14px;
  text-align: center;
}
/* Thin progress track inside the loading overlay */
.progress-bar {
  width: 100%;
  height: 6px;
  background: #eee;
  margin-top: 8px;
  border-radius: 3px;
  overflow: hidden;
}
/* Fill driven by the `progress` percentage binding */
.progress-fill {
  height: 100%;
  background: #48a1e0;
  transition: width 0.2s;
}
.error-message {
  color: red;
  font-size: 14px;
  padding: 10px;
  background: #ffe5e5;
  border-radius: 4px;
}
/* Box around the inhale/exhale read-out and the recompute button */
.region-container {
  display: inline-flex;
  align-items: center;
  margin: auto;
  margin-left: 20px;
  gap: 20px;
  /* width: 35%; */
  width: fit-content;
  border: 1px dashed #333; /* 1px gray dashed border */
  padding: 5px; /* keep content off the border */
}
.region-text {
  display: flex;
  flex-direction: column;
  align-items: flex-start;
  white-space: nowrap;
}
.region-item {
  margin-bottom: 4px; /* spacing between the two region spans */
}
.region-item:last-child {
  margin-bottom: 0;
}
</style>