瀏覽代碼

添加语音波纹效果

丸子 3 月之前
父節點
當前提交
1987c8fb6f
共有 4 個文件被更改,包括 1311 次插入和 28 次删除
  1. +713 −15
      package-lock.json
  2. +1 −0
      package.json
  3. +552 −0
      src/components/ai/voice/LiveWaveform .vue
  4. +45 −13
      src/components/ai/voice/VoiceInput.vue

文件差異過大導致無法顯示
+ 713 - 15
package-lock.json


+ 1 - 0
package.json

@@ -10,6 +10,7 @@
   },
   "dependencies": {
     "@element-plus/icons-vue": "^2.3.1",
+    "@elevenlabs/cli": "^0.3.3",
     "@microsoft/fetch-event-source": "^2.0.1",
     "@vitejs/plugin-legacy": "^7.0.1",
     "@vue-office/docx": "^1.6.3",

+ 552 - 0
src/components/ai/voice/LiveWaveform .vue

@@ -0,0 +1,552 @@
+<template>
+  <div
+    class="relative h-full w-full"
+    :class="className"
+    ref="containerRef"
+    :style="{ height: heightStyle }"
+    :aria-label="ariaLabel"
+    role="img"
+    v-bind="$attrs"
+  >
+    <div v-if="!active && !processing" class="border-muted-foreground/20 absolute top-1/2 right-0 left-0 -translate-y-1/2 border-t-2 border-dotted" />
+    <canvas
+      class="block h-full w-full"
+      ref="canvasRef"
+      aria-hidden="true"
+    />
+  </div>
+</template>
+
+<script setup lang="ts">
+import { ref, onMounted, onUnmounted, watch, computed } from 'vue'
+
+interface Props {
+  active?: boolean
+  processing?: boolean
+  deviceId?: string
+  barWidth?: number
+  barHeight?: number
+  barGap?: number
+  barRadius?: number
+  barColor?: string
+  fadeEdges?: boolean
+  fadeWidth?: number
+  height?: string | number
+  sensitivity?: number
+  smoothingTimeConstant?: number
+  fftSize?: number
+  historySize?: number
+  updateRate?: number
+  mode?: "scrolling" | "static"
+  onError?: (error: Error) => void
+  onStreamReady?: (stream: MediaStream) => void
+  onStreamEnd?: () => void
+  className?: string
+}
+
+const props = withDefaults(defineProps<Props>(), {
+  active: false,
+  processing: false,
+  deviceId: undefined,
+  barWidth: 3,
+  barHeight: 4,
+  barGap: 1,
+  barRadius: 1.5,
+  barColor: undefined,
+  fadeEdges: true,
+  fadeWidth: 24,
+  height: 64,
+  sensitivity: 1,
+  smoothingTimeConstant: 0.8,
+  fftSize: 256,
+  historySize: 60,
+  updateRate: 30,
+  mode: "static",
+  onError: undefined,
+  onStreamReady: undefined,
+  onStreamEnd: undefined,
+  className: ""
+})
+
+const emit = defineEmits()
+
+const canvasRef = ref<HTMLCanvasElement | null>(null)
+const containerRef = ref<HTMLDivElement | null>(null)
+const historyRef = ref<number[]>([])
+const analyserRef = ref<AnalyserNode | null>(null)
+const audioContextRef = ref<AudioContext | null>(null)
+const streamRef = ref<MediaStream | null>(null)
+const animationRef = ref<number>(0)
+const lastUpdateRef = ref<number>(0)
+const processingAnimationRef = ref<number | null>(null)
+const lastActiveDataRef = ref<number[]>([])
+const transitionProgressRef = ref(0)
+const staticBarsRef = ref<number[]>([])
+const needsRedrawRef = ref(true)
+const gradientCacheRef = ref<CanvasGradient | null>(null)
+const lastWidthRef = ref(0)
+const rafId = ref<number | null>(null)
+
+const heightStyle = computed(() => {
+  return typeof props.height === "number" ? `${props.height}px` : props.height
+})
+
+const ariaLabel = computed(() => {
+  if (props.active) return "Live audio waveform"
+  if (props.processing) return "Processing audio"
+  return "Audio waveform idle"
+})
+
+// Handle canvas resizing
+onMounted(() => {
+  const canvas = canvasRef.value
+  const container = containerRef.value
+  if (!canvas || !container) return
+
+  const resizeObserver = new ResizeObserver(() => {
+    const rect = container.getBoundingClientRect()
+    const dpr = window.devicePixelRatio || 1
+
+    canvas.width = rect.width * dpr
+    canvas.height = rect.height * dpr
+    canvas.style.width = `${rect.width}px`
+    canvas.style.height = `${rect.height}px`
+
+    const ctx = canvas.getContext("2d")
+    if (ctx) {
+      ctx.scale(dpr, dpr)
+    }
+
+    gradientCacheRef.value = null
+    lastWidthRef.value = rect.width
+    needsRedrawRef.value = true
+  })
+
+  resizeObserver.observe(container)
+
+  onUnmounted(() => {
+    resizeObserver.disconnect()
+  })
+})
+
+// Handle processing animation
+watch([() => props.processing, () => props.active, () => props.barWidth, () => props.barGap, () => props.mode], () => {
+  if (props.processing && !props.active) {
+    let time = 0
+    transitionProgressRef.value = 0
+
+    const animateProcessing = () => {
+      time += 0.03
+      transitionProgressRef.value = Math.min(
+        1,
+        transitionProgressRef.value + 0.02
+      )
+
+      const processingData: number[] = []
+      const barCount = Math.floor(
+        (containerRef.value?.getBoundingClientRect().width || 200) /
+          (props.barWidth + props.barGap)
+      )
+
+      if (props.mode === "static") {
+        // 生成静态模式的处理数据
+        const halfCount = Math.floor(barCount / 2)
+
+        // 使用正弦波生成平滑的动画效果
+        for (let i = 0; i < barCount; i++) {
+          const normalizedPosition = (i - halfCount) / halfCount
+          const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4
+
+          const wave1 = Math.sin(time * 1.5 + normalizedPosition * 3) * 0.25
+          const wave2 = Math.sin(time * 0.8 - normalizedPosition * 2) * 0.2
+          const wave3 = Math.cos(time * 2 + normalizedPosition) * 0.15
+          const combinedWave = wave1 + wave2 + wave3
+          const processingValue = (0.2 + combinedWave) * centerWeight
+
+          let finalValue = processingValue
+          if (
+            lastActiveDataRef.value.length > 0 &&
+            transitionProgressRef.value < 1
+          ) {
+            const lastDataIndex = Math.min(
+              i,
+              lastActiveDataRef.value.length - 1
+            )
+            const lastValue = lastActiveDataRef.value[lastDataIndex] || 0
+            finalValue =
+              lastValue * (1 - transitionProgressRef.value) +
+              processingValue * transitionProgressRef.value
+          }
+
+          processingData.push(Math.max(0.05, Math.min(1, finalValue)))
+        }
+      } else {
+        // 生成滚动模式的处理数据
+        for (let i = 0; i < barCount; i++) {
+          const normalizedPosition = (i - barCount / 2) / (barCount / 2)
+          const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4
+
+          const wave1 = Math.sin(time * 1.5 + i * 0.15) * 0.25
+          const wave2 = Math.sin(time * 0.8 - i * 0.1) * 0.2
+          const wave3 = Math.cos(time * 2 + i * 0.05) * 0.15
+          const combinedWave = wave1 + wave2 + wave3
+          const processingValue = (0.2 + combinedWave) * centerWeight
+
+          let finalValue = processingValue
+          if (
+            lastActiveDataRef.value.length > 0 &&
+            transitionProgressRef.value < 1
+          ) {
+            const lastDataIndex = Math.floor(
+              (i / barCount) * lastActiveDataRef.value.length
+            )
+            const lastValue = lastActiveDataRef.value[lastDataIndex] || 0
+            finalValue =
+              lastValue * (1 - transitionProgressRef.value) +
+              processingValue * transitionProgressRef.value
+          }
+
+          processingData.push(Math.max(0.05, Math.min(1, finalValue)))
+        }
+      }
+
+      if (props.mode === "static") {
+        staticBarsRef.value = processingData
+      } else {
+        historyRef.value = processingData
+      }
+
+      needsRedrawRef.value = true
+      processingAnimationRef.value =
+        requestAnimationFrame(animateProcessing)
+    }
+
+    animateProcessing()
+
+    onUnmounted(() => {
+      if (processingAnimationRef.value) {
+        cancelAnimationFrame(processingAnimationRef.value)
+      }
+    })
+  } else if (!props.active && !props.processing) {
+    const hasData =
+      props.mode === "static"
+        ? staticBarsRef.value.length > 0
+        : historyRef.value.length > 0
+
+    if (hasData) {
+      let fadeProgress = 0
+      const fadeToIdle = () => {
+        fadeProgress += 0.03
+        if (fadeProgress < 1) {
+          if (props.mode === "static") {
+            staticBarsRef.value = staticBarsRef.value.map(
+              (value) => value * (1 - fadeProgress)
+            )
+          } else {
+            historyRef.value = historyRef.value.map(
+              (value) => value * (1 - fadeProgress)
+            )
+          }
+          needsRedrawRef.value = true
+          requestAnimationFrame(fadeToIdle)
+        } else {
+          if (props.mode === "static") {
+            staticBarsRef.value = []
+          } else {
+            historyRef.value = []
+          }
+        }
+      }
+      fadeToIdle()
+    }
+  }
+}, { immediate: true })
+
+// Handle microphone setup and teardown
+watch([() => props.active, () => props.deviceId, () => props.fftSize, () => props.smoothingTimeConstant], async () => {
+  if (!props.active) {
+    if (streamRef.value) {
+      streamRef.value.getTracks().forEach((track) => track.stop())
+      streamRef.value = null
+      props.onStreamEnd?.()
+    }
+    if (
+      audioContextRef.value &&
+      audioContextRef.value.state !== "closed"
+    ) {
+      audioContextRef.value.close()
+      audioContextRef.value = null
+    }
+    if (animationRef.value) {
+      cancelAnimationFrame(animationRef.value)
+      animationRef.value = 0
+    }
+    return
+  }
+
+  try {
+     // 获取麦克风音频流
+    const stream = await navigator.mediaDevices.getUserMedia({
+      audio: props.deviceId
+        ? {
+            deviceId: { exact: props.deviceId },
+            echoCancellation: true,
+            noiseSuppression: true,
+            autoGainControl: true,
+          }
+        : {
+            echoCancellation: true,
+            noiseSuppression: true,
+            autoGainControl: true,
+          },
+    })
+    streamRef.value = stream
+    props.onStreamReady?.(stream)
+
+    // 处理和分析音频数据
+    const AudioContextConstructor =
+      window.AudioContext ||
+      (window as any).webkitAudioContext
+      // 创建音频上下文和分析器
+    const audioContext = new AudioContextConstructor()
+    const analyser = audioContext.createAnalyser()
+    analyser.fftSize = props.fftSize
+    analyser.smoothingTimeConstant = props.smoothingTimeConstant
+
+    // 连接音频源到分析器
+    const source = audioContext.createMediaStreamSource(stream)
+    source.connect(analyser)
+
+    audioContextRef.value = audioContext
+    analyserRef.value = analyser
+
+    // Clear history when starting
+    historyRef.value = []
+  } catch (error) {
+    props.onError?.(error as Error)
+  }
+}, { immediate: true })
+
+onUnmounted(() => {
+  if (streamRef.value) {
+    streamRef.value.getTracks().forEach((track) => track.stop())
+    streamRef.value = null
+    props.onStreamEnd?.()
+  }
+  if (
+    audioContextRef.value &&
+    audioContextRef.value.state !== "closed"
+  ) {
+    audioContextRef.value.close()
+    audioContextRef.value = null
+  }
+  if (rafId.value) {
+    cancelAnimationFrame(rafId.value)
+  }
+  if (processingAnimationRef.value) {
+    cancelAnimationFrame(processingAnimationRef.value)
+  }
+})
+
+// Animation loop
+onMounted(() => {
+  // 用于绘制波形的canvas元素
+  const canvas = canvasRef.value
+  if (!canvas) return
+
+  const ctx = canvas.getContext("2d")
+  if (!ctx) return
+
+  const animate = (currentTime: number) => {
+    // Render waveform
+    const rect = canvas.getBoundingClientRect()
+
+    // Update audio data if active
+    if (props.active && currentTime - lastUpdateRef.value > props.updateRate) {
+      lastUpdateRef.value = currentTime
+
+      if (analyserRef.value) {
+        const dataArray = new Uint8Array(
+          analyserRef.value.frequencyBinCount
+        )
+        analyserRef.value.getByteFrequencyData(dataArray)
+
+        // 渲染波形
+        if (props.mode === "static") {
+          // 静态模式,在固定位置更新条形
+          const startFreq = Math.floor(dataArray.length * 0.05)
+          const endFreq = Math.floor(dataArray.length * 0.4)
+          const relevantData = dataArray.slice(startFreq, endFreq)
+
+          const barCount = Math.floor(rect.width / (props.barWidth + props.barGap))
+          const halfCount = Math.floor(barCount / 2)
+          const newBars: number[] = []
+
+          // 镜像数据以实现对称显示
+          for (let i = halfCount - 1; i >= 0; i--) {
+            const dataIndex = Math.floor(
+              (i / halfCount) * relevantData.length
+            )
+            const value = Math.min(
+              1,
+              (relevantData[dataIndex] / 255) * props.sensitivity
+            )
+            newBars.push(Math.max(0.05, value))
+          }
+
+          for (let i = 0; i < halfCount; i++) {
+            const dataIndex = Math.floor(
+              (i / halfCount) * relevantData.length
+            )
+            const value = Math.min(
+              1,
+              (relevantData[dataIndex] / 255) * props.sensitivity
+            )
+            newBars.push(Math.max(0.05, value))
+          }
+
+          staticBarsRef.value = newBars
+          lastActiveDataRef.value = newBars
+        } else {
+          // 滚动模式
+          let sum = 0
+          const startFreq = Math.floor(dataArray.length * 0.05)
+          const endFreq = Math.floor(dataArray.length * 0.4)
+          const relevantData = dataArray.slice(startFreq, endFreq)
+
+          for (let i = 0; i < relevantData.length; i++) {
+            sum += relevantData[i]
+          }
+          const average = (sum / relevantData.length / 255) * props.sensitivity
+
+          // 添加到历史记录
+          historyRef.value.push(Math.min(1, Math.max(0.05, average)))
+          lastActiveDataRef.value = [...historyRef.value]
+
+          // 维护历史记录大小
+          if (historyRef.value.length > props.historySize) {
+            historyRef.value.shift()
+          }
+        }
+        needsRedrawRef.value = true
+      }
+    }
+
+    // Only redraw if needed
+    if (!needsRedrawRef.value && !props.active) {
+      rafId.value = requestAnimationFrame(animate)
+      return
+    }
+
+    needsRedrawRef.value = props.active
+    ctx.clearRect(0, 0, rect.width, rect.height)
+
+    const computedBarColor =
+      props.barColor ||
+      (() => {
+        const style = getComputedStyle(canvas)
+        // Try to get the computed color value directly
+        const color = style.color
+        return color || "#000"
+      })()
+
+    const step = props.barWidth + props.barGap
+    const barCount = Math.floor(rect.width / step)
+    const centerY = rect.height / 2
+
+    // Draw bars based on mode
+    if (props.mode === "static") {
+      // Static mode - bars in fixed positions
+      const dataToRender = props.processing
+        ? staticBarsRef.value
+        : props.active
+          ? staticBarsRef.value
+          : staticBarsRef.value.length > 0
+            ? staticBarsRef.value
+            : []
+
+      for (let i = 0; i < barCount && i < dataToRender.length; i++) {
+        const value = dataToRender[i] || 0.1
+        const x = i * step
+        const barHeight = Math.max(props.barHeight, value * rect.height * 0.8)
+        const y = centerY - barHeight / 2
+
+        ctx.fillStyle = computedBarColor
+        ctx.globalAlpha = 0.4 + value * 0.6
+
+        if (props.barRadius > 0) {
+          ctx.beginPath()
+          ctx.roundRect(x, y, props.barWidth, barHeight, props.barRadius)
+          ctx.fill()
+        } else {
+          ctx.fillRect(x, y, props.barWidth, barHeight)
+        }
+      }
+    } else {
+      // Scrolling mode - original behavior
+      for (let i = 0; i < barCount && i < historyRef.value.length; i++) {
+        const dataIndex = historyRef.value.length - 1 - i
+        const value = historyRef.value[dataIndex] || 0.1
+        const x = rect.width - (i + 1) * step
+        const barHeight = Math.max(props.barHeight, value * rect.height * 0.8)
+        const y = centerY - barHeight / 2
+
+        ctx.fillStyle = computedBarColor
+        ctx.globalAlpha = 0.4 + value * 0.6
+
+        if (props.barRadius > 0) {
+          ctx.beginPath()
+          ctx.roundRect(x, y, props.barWidth, barHeight, props.barRadius)
+          ctx.fill()
+        } else {
+          ctx.fillRect(x, y, props.barWidth, barHeight)
+        }
+      }
+    }
+
+    // Apply edge fading
+    if (props.fadeEdges && props.fadeWidth > 0 && rect.width > 0) {
+      // Cache gradient if width hasn't changed
+      if (!gradientCacheRef.value || lastWidthRef.value !== rect.width) {
+        const gradient = ctx.createLinearGradient(0, 0, rect.width, 0)
+        const fadePercent = Math.min(0.3, props.fadeWidth / rect.width)
+
+        // destination-out: removes destination where source alpha is high
+        // We want: fade edges out, keep center solid
+        // Left edge: start opaque (1) = remove, fade to transparent (0) = keep
+        gradient.addColorStop(0, "rgba(255,255,255,1)")
+        gradient.addColorStop(fadePercent, "rgba(255,255,255,0)")
+        // Center stays transparent = keep everything
+        gradient.addColorStop(1 - fadePercent, "rgba(255,255,255,0)")
+        // Right edge: fade from transparent (0) = keep to opaque (1) = remove
+        gradient.addColorStop(1, "rgba(255,255,255,1)")
+
+        gradientCacheRef.value = gradient
+        lastWidthRef.value = rect.width
+      }
+
+      ctx.globalCompositeOperation = "destination-out"
+      ctx.fillStyle = gradientCacheRef.value!
+      ctx.fillRect(0, 0, rect.width, rect.height)
+      ctx.globalCompositeOperation = "source-over"
+    }
+
+    ctx.globalAlpha = 1
+
+    rafId.value = requestAnimationFrame(animate)
+  }
+
+  rafId.value = requestAnimationFrame(animate)
+
+  onUnmounted(() => {
+    if (rafId.value) {
+      cancelAnimationFrame(rafId.value)
+    }
+  })
+})
+</script>
+
+<style scoped>
+/* Add any component-specific styles here */
+</style>

+ 45 - 13
src/components/ai/voice/VoiceInput.vue

@@ -1,20 +1,35 @@
 <template>
-  <button
-    @click="toggleSpeechInput"
-    class="speech-btn"
-    :class="{ 'recording': isRecording }"
-  >
-    <el-icon v-if="!isRecording"><Microphone /></el-icon>
-    <el-icon v-else><Mute /></el-icon>
-    <!-- 显示倒计时(仅录音时显示) -->
-    <span v-if="isRecording" class="countdown-text">{{ countdown }}s</span>
-  </button>
+  <div class="voice-input-container">
+    <button
+      @click="toggleSpeechInput"
+      class="speech-btn"
+      :class="{ 'recording': isRecording }"
+    >
+      <el-icon v-if="!isRecording"><Microphone /></el-icon>
+      <el-icon v-else><Mute /></el-icon>
+      <!-- 显示倒计时(仅录音时显示) -->
+      <span v-if="isRecording" class="countdown-text">{{ countdown }}s</span>
+      <div class="waveform-container" v-if="isRecording">
+        <LiveWaveform 
+          :active="isRecording" 
+          :processing="false"
+          :height="25"
+          :barWidth="2"
+          :barGap="1"
+          :barRadius="1"
+          :sensitivity="1.2"
+        />
+      </div>
+    </button>
+
+  </div>
 </template>
 
 <script setup>
 import { ref, onMounted, onUnmounted } from 'vue'
 import { Microphone, Mute } from '@element-plus/icons-vue'
 import { ElMessage } from 'element-plus'
+import LiveWaveform from './LiveWaveform .vue'
 
 // 定义props
 const props = defineProps({
@@ -49,11 +64,14 @@ const initSpeechRecognition = () => {
 
   const instance = new SpeechRecognition()
   instance.lang = props.lang
-  instance.interimResults = false
+  instance.interimResults = true
 
   instance.onresult = (event) => {
-    if (event.results?.[0]?.[0]) {
-      emit('voiceRecognized', event.results[0][0].transcript)
+    // 遍历所有结果,包括临时结果
+    for (let i = event.resultIndex; i < event.results.length; i++) {
+      const transcript = event.results[i][0].transcript
+      // 无论是否是最终结果,实时识别结果
+      emit('voiceRecognized', transcript)
     }
   }
 
@@ -139,6 +157,12 @@ onUnmounted(() => {
 @function rpx($px) {
   @return math.div($px, 750) * 100vw;
 }
+.voice-input-container {
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  gap: rpx(8);
+}
 .speech-btn {
   padding: rpx(5) rpx(10);
   background: #fff;
@@ -163,4 +187,12 @@ onUnmounted(() => {
     color: #666;
   }
 }
+.waveform-container {
+  // width: 100%;
+  max-width: rpx(40);
+}
+.countdown-text {
+  font-size: rpx(6);
+  color: #666;
+}
 </style>

部分文件因文件數量過多而無法顯示