实现方式一:
vue 中使用 canvas 自绘音频波形图和频谱图(本示例不依赖 wavesurfer.js,仅用 colormap 包生成配色)
安装 colormap:
npm install --save colormap
1、单个频谱图
效果:
源码:
<template>
<div class="spectrogram-container">
<canvas ref="canvas" width="1140" height="150" style="background: #000"></canvas>
<div class="audio-controls">
<audio ref="audioPlayer" controls @play="startPlay" controlsList="nodownload noplaybackrate"></audio>
</div>
</div>
</template>
<script>
import axios from 'axios'
import Vue from 'vue'
import { ACCESS_TOKEN } from '@/store/mutation-types'
import colormap from 'colormap'
// Simple radix-2 FFT (Hamming window + in-place butterflies).
//
// createFFT(size) precomputes the Hamming window for `size` samples and
// returns a transform function. The transform takes exactly `size`
// time-domain samples and returns a Float32Array of the log-scaled
// magnitudes of the first size/2 bins (real input -> symmetric spectrum).
function createFFT(size) {
  // The window table below is sized for `size`; a non-power-of-2 size
  // would also break the butterfly passes, so validate up front.
  if (size < 2 || (size & (size - 1)) !== 0) {
    throw new Error('FFT size must be a power of 2')
  }
  // Precompute the Hamming window once; reused for every transform.
  const hamming = new Float32Array(size)
  for (let i = 0; i < size; i++) {
    hamming[i] = 0.54 - 0.46 * Math.cos((2 * Math.PI * i) / (size - 1))
  }
  return (input) => {
    const n = input.length
    // An input of a different length would read undefined window entries
    // and silently produce NaN — reject it explicitly.
    if (n !== size) throw new Error('FFT input length must equal FFT size')
    const re = new Float32Array(n)
    const im = new Float32Array(n)
    for (let i = 0; i < n; i++) {
      re[i] = input[i] * hamming[i]
      im[i] = 0
    }
    // Bit-reversal permutation so the butterflies can run in place.
    for (let i = 1, j = 0; i < n - 1; i++) {
      let k = n >> 1
      while (j >= k) {
        j -= k
        k >>= 1
      }
      j += k
      if (i < j) {
        ;[re[i], re[j]] = [re[j], re[i]]
        ;[im[i], im[j]] = [im[j], im[i]]
      }
    }
    // Butterfly passes. `span` was previously named `size`, shadowing the
    // outer parameter; renamed for clarity. `w` walks the unit circle by
    // `wStep` on each twiddle index.
    for (let span = 2; span <= n; span <<= 1) {
      const half = span >> 1
      const angle = (-2 * Math.PI) / span
      const w = [1, 0]
      const wStep = [Math.cos(angle), Math.sin(angle)]
      for (let i = 0; i < half; i++) {
        for (let j = i; j < n; j += span) {
          const l = j + half
          const tRe = w[0] * re[l] - w[1] * im[l]
          const tIm = w[0] * im[l] + w[1] * re[l]
          re[l] = re[j] - tRe
          im[l] = im[j] - tIm
          re[j] += tRe
          im[j] += tIm
        }
        const tmp = w[0] * wStep[0] - w[1] * wStep[1]
        w[1] = w[0] * wStep[1] + w[1] * wStep[0]
        w[0] = tmp
      }
    }
    // Only the first n/2 bins are meaningful for a real-valued input.
    const spectrum = new Float32Array(n / 2)
    for (let i = 0; i < n / 2; i++) {
      const mag = Math.sqrt(re[i] ** 2 + im[i] ** 2)
      spectrum[i] = Math.log10(mag + 1) * 100
    }
    return spectrum
  }
}
export default {
// Single-spectrogram audio player: fetches PCM/WAV audio, wraps raw PCM
// in a WAV header so the <audio> element can play it, and renders a
// scrolling spectrogram to a <canvas> in sync with playback time.
name: 'AudioWaveform',
props: ['audioUrl'],
data() {
return {
// Decoded audio samples. NOTE(review): declared as Int8Array here, but
// initAudioPlayer/resetComponent replace it with a Float32Array of
// samples normalized to -1..1 — this initial value is a placeholder.
fileData: new Int8Array(0),
isPlaying: false,
sampleRate: 8000, // sample rate of the incoming PCM — set to the real source rate
interval: 100, // ms of audio consumed per rendered frame
index: 0, // index of the next frame to render
mWidth: 0, // canvas width in px (read in mounted)
mHeight: 0, // canvas height in px (read in mounted)
audio: null, // the <audio> element (set in mounted)
animationId: null, // requestAnimationFrame handle
// Spectrogram state
spectrogram: [], // one entry per time frame; each entry is a column of 0-255 bin values
colorMap: [], // 256 rgba strings produced by colormap()
fftSize: 1024, // FFT window length (must be a power of 2)
xSize: 300, // max number of spectrogram columns kept/drawn
barWidth: 1, // px width of one column
binHeight: 1, // px height of one frequency bin
}
},
watch: {
// Reload whenever the parent hands over a new URL.
audioUrl(newVal) {
this.handleAudioUrl(newVal)
},
},
mounted() {
this.mWidth = this.$refs.canvas.width
this.mHeight = this.$refs.canvas.height
// Keep at least 130 columns so roughly 13 s of data fits on screen.
this.xSize = Math.max(130, this.mWidth / 2)
this.barWidth = Math.max(1, Math.floor(this.mWidth / this.xSize))
this.binHeight = this.mHeight / (this.fftSize / 2)
// Build the 256-entry color lookup table used by drawSpectrogram.
this.colorMap = colormap({
colormap: 'magma', // any name supported by the colormap package
nshades: 256, // number of color steps
format: 'rgbaString', // output format
alpha: 1, // opacity
})
this.audio = this.$refs.audioPlayer
this.handleAudioUrl(this.audioUrl)
},
methods: {
// 计算使3.2秒音频刚好撑满屏幕的xSize
calculateXSizeFor3_2Seconds() {
const targetDuration = 3.2 // 3.2秒
const totalFrames = (this.sampleRate * targetDuration) / this.fftSize
// 两种计算方式确保精度:
// 方式1:基于总样本数
this.xSize = Math.floor((this.sampleRate * targetDuration) / (this.fftSize / 2))
// 方式2:基于画布宽度和期望的时间分辨率
// const timePerPixel = (targetDuration * 1000) / this.mWidth; // ms/px
// this.xSize = Math.floor((targetDuration * 1000) / timePerPixel);
// 限制最小和最大值
this.xSize = Math.max(10, Math.min(this.xSize, this.mWidth))
// 重新计算每列宽度
this.barWidth = this.mWidth / this.xSize
console.log(`3.2秒显示优化:
采样率=${this.sampleRate}Hz,
FFT大小=${this.fftSize},
画布宽度=${this.mWidth}px,
最终xSize=${this.xSize},
每列宽度=${this.barWidth}px`)
},
// Entry point for a new audio URL: resets state, then loads raw PCM
// (with the auth header) or downloads a regular audio file.
handleAudioUrl(audioUrl) {
console.log('加载音频:', audioUrl)
if (!audioUrl) return
// Stop any current playback and clear previous data first.
this.resetComponent()
if (audioUrl.endsWith('.pcm')) {
this.loadPcmAudio(audioUrl)
} else {
this.downloadAudio(audioUrl)
}
},
// Stop playback, clear decoded samples / spectrogram / canvas, and
// release the previous object URL so its Blob can be garbage-collected.
resetComponent() {
this.stopPlayback()
this.fileData = new Float32Array(0)
this.index = 0
this.spectrogram = []
this.clearCanvas()
if (this.audio.src) {
URL.revokeObjectURL(this.audio.src)
this.audio.src = ''
}
},
// Fetch a raw .pcm file; the token header is required by the backend.
// NOTE(review): res.ok is not checked before reading the body — an HTTP
// error page would be parsed as PCM.
loadPcmAudio(url) {
fetch(url, {
method: 'GET',
headers: { 'X-Mintti-Web-Token': Vue.ls.get(ACCESS_TOKEN) },
})
.then((res) => res.arrayBuffer())
.then((buffer) => this.initAudioPlayer(buffer))
.catch((err) => {
console.error('PCM 加载失败:', err)
this.$message.warning('音频加载失败')
})
},
// Download a non-PCM audio file (e.g. .wav) as an ArrayBuffer.
downloadAudio(url) {
axios
.get(url, { responseType: 'arraybuffer' })
.then((res) => this.initAudioPlayer(res.data))
.catch((err) => {
console.error('下载失败:', err)
this.$message.warning('音频下载失败')
})
},
// Decode the downloaded buffer into Float32 samples and point the
// <audio> element at a playable WAV Blob built from the same PCM data.
initAudioPlayer(arraybuffer) {
const uint8 = new Uint8Array(arraybuffer)
// Detect an existing WAV header and skip its 44 bytes if present.
const isWav = uint8[0] === 82 && uint8[1] === 73 && uint8[2] === 70 // 'RIFF'
const dataStart = isWav ? 44 : 0
// Interpret the payload as 16-bit little-endian PCM.
const pcmData = new Int16Array(arraybuffer.slice(dataStart))
this.fileData = new Float32Array(pcmData.length)
// 16-bit PCM -> Float32 (-32768..32767 -> -1..1)
for (let i = 0; i < pcmData.length; i++) {
this.fileData[i] = pcmData[i] / 32768.0
}
// Wrap the raw PCM in a WAV container so <audio> can play it.
const wavHeader = this.createWavHeader(pcmData.length)
const wavData = new Uint8Array(wavHeader.byteLength + arraybuffer.byteLength - dataStart)
wavData.set(new Uint8Array(wavHeader), 0)
wavData.set(uint8.subarray(dataStart), wavHeader.byteLength)
const blob = new Blob([wavData], { type: 'audio/wav' })
const url = URL.createObjectURL(blob)
this.audio.src = url
this.audio.load()
// Keep playback state and the frame index in sync with the element.
// NOTE(review): these listeners are added on every load and never
// removed, so loading several URLs stacks duplicate handlers — consider
// attaching them once in mounted().
this.audio.addEventListener('play', () => (this.isPlaying = true))
this.audio.addEventListener('pause', () => (this.isPlaying = false))
this.audio.addEventListener('ended', () => this.stopPlayback())
this.audio.addEventListener('seeked', () => {
this.index = Math.floor((this.audio.currentTime * 1000) / this.interval)
this.spectrogram = []
})
// Recompute xSize/barWidth now that audio parameters are known.
this.calculateXSizeFor3_2Seconds()
},
// 创建WAV文件头
createWavHeader(dataLength) {
const buffer = new ArrayBuffer(44)
const view = new DataView(buffer)
// RIFF标识
this.writeString(view, 0, 'RIFF')
// 文件长度
view.setUint32(4, 36 + dataLength * 2, true)
// WAVE标识
this.writeString(view, 8, 'WAVE')
// fmt子块
this.writeString(view, 12, 'fmt ')
// fmt长度
view.setUint32(16, 16, true)
// 编码方式: 1表示PCM
view.setUint16(20, 1, true)
// 声道数
view.setUint16(22, 1, true)
// 采样率
view.setUint32(24, this.sampleRate, true)
// 字节率
view.setUint32(28, this.sampleRate * 2, true)
// 块对齐
view.setUint16(32, 2, true)
// 位深度
view.setUint16(34, 16, true)
// data标识
this.writeString(view, 36, 'data')
// data长度
view.setUint32(40, dataLength * 2, true)
return buffer
},
writeString(view, offset, string) {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i))
}
},
// @play handler: (re)start the spectrogram animation alongside playback.
startPlay() {
if (this.audio && this.fileData.length > 0) {
// If the track ended, or the frame index ran past the audio duration
// (samples / samples-per-ms = duration in ms), start over from zero.
if (this.audio.ended || this.index * this.interval >= this.fileData.length / (this.sampleRate / 1000)) {
this.stopPlayback()
}
// Record the wall-clock start time for frame pacing.
this.lastUpdateTime = performance.now()
this.audio.play()
this.isPlaying = true
this.timer()
}
},
// requestAnimationFrame loop: advance the spectrogram to the frame
// implied by audio.currentTime, catching up a bounded number of frames
// per tick so a long stall cannot freeze the UI.
timer() {
if (!this.isPlaying) return
// Ideal frame count for the current playback position.
const targetFrame = Math.floor((this.audio.currentTime * 1000) / this.interval)
const maxCatchUpFrames = 5 // cap catch-up work per tick to avoid jank
let framesToUpdate = Math.min(targetFrame - this.index, maxCatchUpFrames)
while (framesToUpdate > 0) {
this.refreshData()
framesToUpdate--
}
// If still behind after the capped catch-up, render one more frame.
if (framesToUpdate === 0 && this.index < targetFrame) {
this.refreshData()
}
this.animationId = requestAnimationFrame(() => this.timer())
},
refreshData() {
// 计算每帧推进的样本数,使3.2秒刚好撑满
const samplesPerFrame = Math.floor((this.sampleRate * 3.2) / this.xSize)
const start = this.index * samplesPerFrame
const end = start + this.fftSize
if (start >= this.fileData.length) {
this.stopPlayback()
return
}
let segment
if (end > this.fileData.length) {
segment = new Float32Array(this.fftSize)
segment.set(this.fileData.slice(start))
} else {
segment = this.fileData.slice(start, end)
}
// 执行FFT
const fft = createFFT(this.fftSize)
const spectrum = fft(segment)
// 归一化
const maxDB = 0,
minDB = -80
const normalized = spectrum.map((v) => {
const dbValue = 20 * Math.log10(v + 1e-6)
return Math.max(0, Math.min(255, Math.floor(((dbValue - minDB) / (maxDB - minDB)) * 255)))
})
// 更新频谱数据
if (this.spectrogram.length >= this.xSize) {
this.spectrogram.shift()
}
this.spectrogram.push(normalized)
this.drawSpectrogram()
this.index += 1
},
// Render the spectrogram columns right-aligned on the canvas: the
// newest column sits at the right edge and older ones scroll left.
drawSpectrogram() {
const ctx = this.$refs.canvas.getContext('2d')
const { width, height } = ctx.canvas
ctx.clearRect(0, 0, width, height)
const dx = width / Math.max(this.xSize, this.spectrogram.length)
for (let x = 0; x < this.spectrogram.length; x++) {
const spec = this.spectrogram[x]
const canvasX = width - (this.spectrogram.length - x) * dx
for (let y = 0; y < spec.length; y++) {
// Power-law (exponent 0.7) remap of the frequency axis so low
// frequencies get more vertical space.
const freqIndex = Math.floor(Math.pow(y / spec.length, 0.7) * spec.length)
const colorIdx = Math.max(0, Math.min(255, spec[freqIndex]))
ctx.fillStyle = this.colorMap[colorIdx]
// Low frequencies at the bottom, high frequencies at the top.
const pixelY = height - y * (height / spec.length)
ctx.fillRect(canvasX, pixelY, dx, height / spec.length)
}
}
},
stopPlayback() {
this.isPlaying = false
this.audio.pause()
this.audio.currentTime = 0 // 重置播放位置
this.index = 0 // 重置频谱索引
this.spectrogram = [] // 清空频谱数据
this.clearAnimation()
this.clearCanvas()
},
clearAnimation() {
if (this.animationId) {
cancelAnimationFrame(this.animationId)
this.animationId = null
}
},
clearCanvas() {
const ctx = this.$refs.canvas.getContext('2d')
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height)
},
},
beforeDestroy() {
// Stop the animation loop and release the Blob object URL on teardown.
this.stopPlayback()
if (this.audio.src) {
URL.revokeObjectURL(this.audio.src)
}
},
}
</script>
<style scoped>
.spectrogram-container {
display: flex;
flex-direction: column;
align-items: center;
padding: 10px;
font-family: Arial, sans-serif;
}
canvas {
border: 1px solid #333;
border-radius: 4px;
}
.audio-controls {
margin-top: 10px;
}
</style>
2、波形图+ 频谱图
波形图参考:心电波形图EcgView
效果图:
源码:
<template>
<div class="audio-visualizer">
<div class="visualization-container">
<div class="waveform-container">
<canvas ref="waveformCanvas" width="1140" height="150"></canvas>
</div>
<div class="spectrogram-container">
<canvas ref="spectrogramCanvas" width="1140" height="150" style="background: #000"></canvas>
</div>
</div>
<div class="audio-controls">
<audio ref="audioPlayer" controls @play="startPlay" controlsList="nodownload noplaybackrate"></audio>
</div>
</div>
</template>
<script>
import axios from 'axios'
import Vue from 'vue'
import { ACCESS_TOKEN } from '@/store/mutation-types'
import colormap from 'colormap'
export default {
// Combined waveform + spectrogram player. Same loading/playback
// pipeline as the spectrogram-only component, but additionally renders
// an ECG-style waveform canvas above the spectrogram canvas.
name: 'AudioWaveform',
props: ['audioUrl'],
data() {
return {
// Audio data
fileData: new Int8Array(0), // raw PCM bytes (header stripped), for the waveform
pcmData: new Float32Array(0), // samples normalized to -1..1, for the FFT
isPlaying: false,
sampleRate: 8000,
interval: 100, // ms of audio consumed per frame (shared by both views)
index: 0, // next frame to render
// Waveform state
waveformData: [], // downsampled 16-bit points currently on screen
waveformCtx: null,
waveformWidth: 1140,
waveformHeight: 150,
zoom: 0, // px per millimeter of the background grid
gapX: 0.2, // px between consecutive waveform points
xSize: 0, // max points/columns kept on screen
maxMillimeter: 5 * 5, // grid height in mm (5 large squares of 5 mm)
STEP_SIZE: 50, // keep every 50th sample for the waveform
gain: 5, // vertical amplification
maxMidScopeY: 0, // baseline offset subtracted from each sample
// Spectrogram state
spectrogramCtx: null,
spectrogramWidth: 1140,
spectrogramHeight: 150,
spectrogram: [], // one 0-255 column per time frame
colorMap: [], // 256 rgba strings from colormap()
fftSize: 1024, // FFT window length (power of 2)
barWidth: 1,
binHeight: 1,
// Playback control
audio: null, // the <audio> element
animationId: null, // requestAnimationFrame handle
lastTime: 0, // wall-clock time recorded at play start
}
},
watch: {
// Reload when the parent hands over a new URL.
audioUrl(newVal) {
this.handleAudioUrl(newVal)
},
},
mounted() {
// Waveform canvas: context, size, background grid and drawing params.
this.waveformCtx = this.$refs.waveformCanvas.getContext('2d')
this.waveformWidth = this.$refs.waveformCanvas.width
this.waveformHeight = this.$refs.waveformCanvas.height
this.drawBg(this.waveformCtx)
this.initWaveformParams()
// Spectrogram canvas.
this.spectrogramCtx = this.$refs.spectrogramCanvas.getContext('2d')
this.spectrogramWidth = this.$refs.spectrogramCanvas.width
this.spectrogramHeight = this.$refs.spectrogramCanvas.height
// NOTE(review): this overwrites the xSize just computed by
// initWaveformParams — both views share the single xSize field; confirm
// that is intended.
this.xSize = Math.max(130, this.spectrogramWidth / 2)
this.barWidth = Math.max(1, Math.floor(this.spectrogramWidth / this.xSize))
this.binHeight = this.spectrogramHeight / (this.fftSize / 2)
// 256-entry color lookup table used by drawSpectrogram.
this.colorMap = colormap({
colormap: 'magma',
nshades: 256,
format: 'rgbaString',
alpha: 1,
})
this.audio = this.$refs.audioPlayer
if (this.audioUrl) {
this.handleAudioUrl(this.audioUrl)
}
},
methods: {
// 初始化波形图参数
initWaveformParams() {
this.zoom = this.waveformHeight / this.maxMillimeter
// 计算每像素对应的秒数
const secondsPerPixel = 0.04 / this.zoom
// 计算gapX确保波形填满画布
const samplesPerPixel = this.sampleRate * secondsPerPixel
this.gapX = Math.max(1, samplesPerPixel / this.STEP_SIZE)
this.xSize = Math.ceil(this.waveformWidth / this.gapX)
console.log(`波形参数:gapX=${this.gapX}, xSize=${this.xSize}`)
},
// Entry point for a new audio URL: reset state, then load raw PCM (with
// the auth header) or download a regular audio file.
handleAudioUrl(audioUrl) {
if (!audioUrl) return
this.resetComponent()
if (audioUrl.endsWith('.pcm')) {
this.loadPcmAudio(audioUrl)
} else {
this.downloadAudio(audioUrl)
}
},
// Stop playback, drop decoded data and drawn content, and release the
// previous Blob object URL.
resetComponent() {
this.stopPlayback()
this.fileData = new Int8Array(0)
this.pcmData = new Float32Array(0)
this.index = 0
this.waveformData = []
this.spectrogram = []
this.clearCanvas()
if (this.audio && this.audio.src) {
URL.revokeObjectURL(this.audio.src)
this.audio.src = ''
}
},
// Fetch a raw .pcm file; the token header is required by the backend.
// NOTE(review): res.ok is not checked before reading the body.
loadPcmAudio(url) {
fetch(url, {
method: 'GET',
headers: { 'X-Mintti-Web-Token': Vue.ls.get(ACCESS_TOKEN) },
})
.then((res) => res.arrayBuffer())
.then((buffer) => this.initAudioPlayer(buffer))
.catch((err) => {
console.error('PCM 加载失败:', err)
this.$message.warning('音频加载失败')
})
},
// Download a non-PCM audio file as an ArrayBuffer.
downloadAudio(url) {
axios
.get(url, { responseType: 'arraybuffer' })
.then((res) => this.initAudioPlayer(res.data))
.catch((err) => {
console.error('下载失败:', err)
this.$message.warning('音频下载失败')
})
},
// Decode the buffer into raw bytes (waveform path) and Float32 samples
// (FFT path), and point the <audio> element at a playable WAV Blob.
initAudioPlayer(arraybuffer) {
const uint8 = new Uint8Array(arraybuffer)
// Skip a 44-byte header when the payload already starts with 'RIFF'.
const isWav = uint8[0] === 82 && uint8[1] === 73 && uint8[2] === 70 // 'RIFF'
const dataStart = isWav ? 44 : 0
// Raw PCM bytes, consumed by the waveform path in refreshData.
this.fileData = new Int8Array(arraybuffer.slice(dataStart))
// 16-bit little-endian PCM samples for the spectrogram path.
const pcmData = new Int16Array(arraybuffer.slice(dataStart))
this.pcmData = new Float32Array(pcmData.length)
// 16-bit PCM -> Float32 (-32768..32767 -> -1..1)
for (let i = 0; i < pcmData.length; i++) {
this.pcmData[i] = pcmData[i] / 32768.0
}
// Wrap the raw PCM in a WAV container so <audio> can play it.
const wavHeader = this.createWavHeader(pcmData.length)
const wavData = new Uint8Array(wavHeader.byteLength + arraybuffer.byteLength - dataStart)
wavData.set(new Uint8Array(wavHeader), 0)
wavData.set(uint8.subarray(dataStart), wavHeader.byteLength)
const blob = new Blob([wavData], { type: 'audio/wav' })
const url = URL.createObjectURL(blob)
this.audio.src = url
this.audio.load()
// NOTE(review): these listeners are re-added on every load and never
// removed, so loading several URLs stacks duplicate handlers.
this.audio.addEventListener('play', () => (this.isPlaying = true))
this.audio.addEventListener('pause', () => (this.isPlaying = false))
this.audio.addEventListener('ended', () => this.stopPlayback())
this.audio.addEventListener('seeked', () => {
this.index = Math.floor((this.audio.currentTime * 1000) / this.interval)
this.waveformData = []
this.spectrogram = []
})
},
// 创建WAV文件头
createWavHeader(dataLength) {
const buffer = new ArrayBuffer(44)
const view = new DataView(buffer)
// RIFF标识
this.writeString(view, 0, 'RIFF')
// 文件长度
view.setUint32(4, 36 + dataLength * 2, true)
// WAVE标识
this.writeString(view, 8, 'WAVE')
// fmt子块
this.writeString(view, 12, 'fmt ')
// fmt长度
view.setUint32(16, 16, true)
// 编码方式: 1表示PCM
view.setUint16(20, 1, true)
// 声道数
view.setUint16(22, 1, true)
// 采样率
view.setUint32(24, this.sampleRate, true)
// 字节率
view.setUint32(28, this.sampleRate * 2, true)
// 块对齐
view.setUint16(32, 2, true)
// 位深度
view.setUint16(34, 16, true)
// data标识
this.writeString(view, 36, 'data')
// data长度
view.setUint32(40, dataLength * 2, true)
return buffer
},
writeString(view, offset, string) {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i))
}
},
// @play handler: (re)start the frame loop alongside audio playback.
startPlay() {
if (this.audio && this.fileData.length > 0) {
// Restart from the top if the track ended or the frame index ran past
// the audio duration in ms (samples / samples-per-ms).
if (this.audio.ended || this.index * this.interval >= this.pcmData.length / (this.sampleRate / 1000)) {
this.stopPlayback()
}
// Record the wall-clock start time for frame pacing.
this.lastTime = performance.now()
this.audio.play()
this.isPlaying = true
this.timer()
}
},
// requestAnimationFrame loop: advance rendering to the frame implied by
// audio.currentTime, catching up a bounded number of frames per tick.
timer() {
if (!this.isPlaying) return
// Ideal frame count for the current playback position.
const targetFrame = Math.floor((this.audio.currentTime * 1000) / this.interval)
const maxCatchUpFrames = 5 // cap catch-up work per tick to avoid jank
let framesToUpdate = Math.min(targetFrame - this.index, maxCatchUpFrames)
while (framesToUpdate > 0) {
this.refreshData()
framesToUpdate--
}
// If still behind after the capped catch-up, render one more frame.
if (framesToUpdate === 0 && this.index < targetFrame) {
this.refreshData()
}
this.animationId = requestAnimationFrame(() => this.timer())
},
// 刷新数据
refreshData() {
// 处理波形图数据 - 每次更新1600字节数据
const start = this.index * 1600
const end = start + 1600
if (start >= this.fileData.length) {
this.stopPlayback()
return
}
const byteArray = this.fileData.slice(start, end)
const shortArray = new Int16Array(byteArray.length / 2)
//遍历 byteArray,将每两个字节合并成一个短整型
for (let i = 0; i < byteArray.length; i += 2) {
shortArray[i / 2] = (byteArray[i] & 0xff) | ((byteArray[i + 1] & 0xff) << 8)
}
// 修改波形数据处理部分
for (let i = 0; i < shortArray.length; i += this.STEP_SIZE) {
// 限制波形数据长度,避免内存增长
if (this.waveformData.length >= this.xSize * 2) {
// 适当增加缓冲区
this.waveformData.shift()
}
this.waveformData.push(shortArray[i])
}
// 处理频谱图数据 - 每次更新fftSize/2个样本
const fftStart = this.index * (this.fftSize / 2)
const fftEnd = fftStart + this.fftSize
let segment
if (fftEnd > this.pcmData.length) {
segment = new Float32Array(this.fftSize)
segment.set(this.pcmData.slice(fftStart))
} else {
segment = this.pcmData.slice(fftStart, fftEnd)
}
// 执行FFT
const spectrum = this.fft(segment)
// 归一化
const maxDB = 0,
minDB = -80
const normalized = spectrum.map((v) => {
const dbValue = 20 * Math.log10(v + 1e-6)
return Math.max(0, Math.min(255, Math.floor(((dbValue - minDB) / (maxDB - minDB)) * 255)))
})
// 更新频谱数据
if (this.spectrogram.length >= this.xSize) {
this.spectrogram.shift()
}
this.spectrogram.push(normalized)
// 绘制
this.drawWaveform()
this.drawSpectrogram()
this.index += 1
},
// In-place radix-2 FFT of `input` (length must be a power of 2).
// Applies a Hamming window, then returns the log-scaled magnitudes of
// the first n/2 bins (real input -> symmetric spectrum).
fft(input) {
const n = input.length
const logN = Math.log2(n)
if (n !== 1 << logN) throw new Error('FFT length must be power of 2')
// Hamming window to reduce spectral leakage.
const windowed = new Float32Array(n)
for (let i = 0; i < n; i++) {
windowed[i] = input[i] * (0.54 - 0.46 * Math.cos((2 * Math.PI * i) / (n - 1)))
}
const re = new Float32Array(n)
const im = new Float32Array(n)
for (let i = 0; i < n; i++) {
re[i] = windowed[i]
im[i] = 0
}
// Bit-reversal permutation so the butterflies can run in place.
for (let i = 1, j = 0; i < n - 1; i++) {
let k = n >> 1
while (j >= k) {
j -= k
k >>= 1
}
j += k
if (i < j) {
;[re[i], re[j]] = [re[j], re[i]]
;[im[i], im[j]] = [im[j], im[i]]
}
}
// Butterfly passes; w walks the unit circle by wStep per twiddle index.
// (Note: the loop variable `size` here is local to this method.)
for (let size = 2; size <= n; size <<= 1) {
const half = size >> 1
const angle = (-2 * Math.PI) / size
const w = [1, 0]
const wStep = [Math.cos(angle), Math.sin(angle)]
for (let i = 0; i < half; i++) {
for (let j = i; j < n; j += size) {
const l = j + half
const tRe = w[0] * re[l] - w[1] * im[l]
const tIm = w[0] * im[l] + w[1] * re[l]
re[l] = re[j] - tRe
im[l] = im[j] - tIm
re[j] += tRe
im[j] += tIm
}
const tmp = w[0] * wStep[0] - w[1] * wStep[1]
w[1] = w[0] * wStep[1] + w[1] * wStep[0]
w[0] = tmp
}
}
// Log-scaled magnitude spectrum of the first half.
const spectrum = new Float32Array(n / 2)
for (let i = 0; i < n / 2; i++) {
const mag = Math.sqrt(re[i] ** 2 + im[i] ** 2)
spectrum[i] = Math.log10(mag + 1) * 100
}
return spectrum
},
// Draw the scrolling waveform: the newest point sits at the right edge
// and older points are pushed left, on top of the background grid.
drawWaveform() {
const ctx = this.waveformCtx
ctx.clearRect(0, 0, this.waveformWidth, this.waveformHeight)
this.drawBg(ctx)
ctx.beginPath()
ctx.lineWidth = 1
ctx.strokeStyle = '#48a1e0'
const len = this.waveformData.length
const mCenterY = this.waveformHeight / 2
const maxPoints = Math.ceil(this.waveformWidth / this.gapX)
// Index of the first visible point (only the last maxPoints fit).
const startIndex = Math.max(0, len - maxPoints)
for (let i = startIndex; i < len; i++) {
// Sample -> voltage -> px, centered on the canvas midline.
const y = Math.floor(
this.calcRealMv(this.maxMidScopeY - this.waveformData[i]) * this.gain * this.zoom + mCenterY
)
// Right-align so the waveform scrolls from right to left.
const x = this.waveformWidth - (len - i) * this.gapX
if (i === startIndex) {
ctx.moveTo(x, y)
} else {
ctx.lineTo(x, y)
}
}
ctx.stroke()
},
// Render the spectrogram columns right-aligned: newest at the right
// edge, scrolling left as columns are appended.
drawSpectrogram() {
const ctx = this.spectrogramCtx
const { width, height } = ctx.canvas
ctx.clearRect(0, 0, width, height)
const dx = width / Math.max(this.xSize, this.spectrogram.length)
for (let x = 0; x < this.spectrogram.length; x++) {
const spec = this.spectrogram[x]
const canvasX = width - (this.spectrogram.length - x) * dx
for (let y = 0; y < spec.length; y++) {
// Power-law (exponent 0.7) remap of the frequency axis so low
// frequencies get more vertical space.
const freqIndex = Math.floor(Math.pow(y / spec.length, 0.7) * spec.length)
const colorIdx = Math.max(0, Math.min(255, spec[freqIndex]))
ctx.fillStyle = this.colorMap[colorIdx]
// Low frequencies at the bottom, high frequencies at the top.
const pixelY = height - y * (height / spec.length)
ctx.fillRect(canvasX, pixelY, dx, height / spec.length)
}
}
},
// Draw the two-level background grid: thin lines for small cells, thick
// lines every 5 cells.
drawBg(ctx) {
ctx.lineWidth = 1
this.drawGrid(ctx, this.maxMillimeter)
ctx.lineWidth = 2
this.drawGrid(ctx, this.maxMillimeter / 5)
},
// Draw a grid with `cols` rows over the canvas height; the same spacing
// is reused horizontally, so the cells are square.
drawGrid(ctx, cols) {
const { width, height } = ctx.canvas
ctx.strokeStyle = '#ccc'
const rowSpace = height / cols
// Vertical lines
for (let i = 0; i * rowSpace <= width; i++) {
ctx.beginPath()
ctx.moveTo(i * rowSpace, 0)
ctx.lineTo(i * rowSpace, height)
ctx.stroke()
}
// Horizontal lines
for (let i = 0; i <= cols; i++) {
ctx.beginPath()
ctx.moveTo(0, i * rowSpace)
ctx.lineTo(width, i * rowSpace)
ctx.stroke()
}
},
// Convert a raw 16-bit sample to a voltage, assuming a 3.3 V full scale
// over the positive int16 range — TODO confirm against the hardware.
calcRealMv(point) {
return (point * 3.3) / 32767
},
// Halt playback and return both views to their idle state.
stopPlayback() {
this.isPlaying = false
if (this.audio) {
this.audio.pause()
this.audio.currentTime = 0
}
this.index = 0
this.waveformData = []
this.spectrogram = []
this.clearAnimation()
this.clearCanvas()
},
// Cancel any pending requestAnimationFrame callback.
clearAnimation() {
if (this.animationId) {
cancelAnimationFrame(this.animationId)
this.animationId = null
}
},
// Wipe both canvases and redraw the waveform background grid.
clearCanvas() {
this.waveformCtx.clearRect(0, 0, this.waveformWidth, this.waveformHeight)
this.drawBg(this.waveformCtx)
this.spectrogramCtx.clearRect(0, 0, this.spectrogramWidth, this.spectrogramHeight)
},
},
beforeDestroy() {
// Stop the animation loop and release the Blob object URL on teardown.
this.stopPlayback()
if (this.audio && this.audio.src) {
URL.revokeObjectURL(this.audio.src)
}
},
}
</script>
<style scoped>
.audio-visualizer {
display: flex;
flex-direction: column;
align-items: center;
padding: 2px;
font-family: Arial, sans-serif;
}
.visualization-container {
width: 100%;
display: flex;
flex-direction: column;
gap: 2px;
}
.waveform-container canvas {
border: 1px solid #333;
border-radius: 4px;
}
.spectrogram-container canvas {
border: 1px solid #333;
border-radius: 4px;
}
.audio-controls {
margin-top: 5px;
width: 100%;
}
audio {
width: 100%;
}
</style>