Demo

Usage
<!-- Browser speech recognition -->
<BrowserSpeechRecognitionModal
  v-if="showModal"
  :isOpen="showModal"
  @close="showModal = false"
  @confirm="handleRecognitionResult"
/>

import { ref } from 'vue';

const showModal = ref(false);
const inputText = ref('');

// Write the confirmed transcript into the bound input
const handleRecognitionResult = (text: string) => {
  inputText.value = text;
};
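Putting the two fragments together, a parent component might look like the sketch below. The surrounding textarea, the trigger button, and the import path are assumptions added for illustration; only the modal tag and the confirm handler come from the snippet above. The full component implementation follows after the sketch.

<template>
  <div class="note-editor">
    <textarea v-model="inputText" placeholder="Type here, or dictate with the microphone..."></textarea>
    <button @click="showModal = true">Voice input</button>

    <!-- Browser speech recognition -->
    <BrowserSpeechRecognitionModal
      v-if="showModal"
      :isOpen="showModal"
      @close="showModal = false"
      @confirm="handleRecognitionResult"
    />
  </div>
</template>

<script setup lang="ts">
import { ref } from 'vue';
// Assumed relative path; adjust to wherever the component lives in your project
import BrowserSpeechRecognitionModal from './BrowserSpeechRecognitionModal.vue';

const showModal = ref(false);
const inputText = ref('');

// The modal emits the confirmed transcript via @confirm
const handleRecognitionResult = (text: string) => {
  inputText.value = text;
};
</script>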
BrowserSpeechRecognitionModal.vue
<template>
  <transition name="modal-fade">
    <div v-if="isOpen" class="modal-overlay" @click.self="handleOverlayClick">
      <div class="modal-container">
        <div class="modal-header">
          <h2>Voice Input</h2>
          <button class="close-button" @click="closeModal">×</button>
        </div>
        <div class="modal-body">
          <div class="status-indicator">
            <div v-if="isListening" class="mic-animation">
              <div class="mic-icon">
                <svg viewBox="0 0 24 24">
                  <path d="M12,2A3,3 0 0,1 15,5V11A3,3 0 0,1 12,14A3,3 0 0,1 9,11V5A3,3 0 0,1 12,2M19,11C19,14.53 16.39,17.44 13,17.93V21H11V17.93C7.61,17.44 5,14.53 5,11H7A5,5 0 0,0 12,16A5,5 0 0,0 17,11H19Z" />
                </svg>
              </div>
              <div class="sound-wave">
                <div class="wave"></div>
                <div class="wave"></div>
                <div class="wave"></div>
              </div>
            </div>
            <div v-else class="mic-ready">
              <svg viewBox="0 0 24 24">
                <path d="M12,2A3,3 0 0,1 15,5V11A3,3 0 0,1 12,14A3,3 0 0,1 9,11V5A3,3 0 0,1 12,2M19,11C19,14.53 16.39,17.44 13,17.93V21H11V17.93C7.61,17.44 5,14.53 5,11H7A5,5 0 0,0 12,16A5,5 0 0,0 17,11H19Z" />
              </svg>
            </div>
            <p class="status-text">{{ statusText }}</p>
            <div v-if="recognitionError" class="error-message">
              <svg viewBox="0 0 24 24" class="error-icon">
                <path d="M12,2C17.53,2 22,6.47 22,12C22,17.53 17.53,22 12,22C6.47,22 2,17.53 2,12C2,6.47 6.47,2 12,2M15.59,7L12,10.59L8.41,7L7,8.41L10.59,12L7,15.59L8.41,17L12,13.41L15.59,17L17,15.59L13.41,12L17,8.41L15.59,7Z" />
              </svg>
              <span>{{ friendlyErrorMessage }}</span>
            </div>
          </div>
          <div class="result-container">
            <div class="result-content" :class="{ 'has-result': transcript }">
              {{ transcript || 'Click "Start Recording" and speak...' }}
            </div>
          </div>
        </div>
        <div class="modal-footer">
          <button
            @click="toggleRecognition"
            class="control-button"
            :class="{ 'listening': isListening }"
            :disabled="!isBrowserSupported"
          >
            {{ isListening ? 'Stop Recording' : 'Start Recording' }}
          </button>
          <button @click="confirmResult" class="confirm-button" :disabled="!transcript">Use Text</button>
        </div>
      </div>
    </div>
  </transition>
</template>

<script setup>
import { ref, computed, onMounted, onBeforeUnmount, watch } from 'vue';

const props = defineProps({
  isOpen: {
    type: Boolean,
    required: true
  },
  lang: {
    type: String,
    default: 'zh-CN'
  }
});

const emit = defineEmits(['close', 'confirm']);

const recognition = ref(null);
const isListening = ref(false);
const transcript = ref('');
const recognitionError = ref(null);
const isBrowserSupported = ref(true);

// Map Web Speech API error codes to user-friendly messages
const errorMessageMap = {
  'no-speech': 'No speech detected. Please move closer to the microphone and speak.',
  'audio-capture': 'Cannot access the microphone. Please check that a microphone is connected.',
  'not-allowed': 'Microphone permission denied. Please allow the browser to use the microphone.',
  'aborted': 'Speech recognition was aborted.',
  'network': 'Network problem. Please check your connection and try again.',
  'not-supported': 'Your browser does not support speech recognition.',
  'service-not-allowed': 'The speech recognition service is unavailable.',
  'bad-grammar': 'A grammar error occurred during recognition.',
  'language-not-supported': 'The current language setting is not supported.',
  'default': 'Something went wrong with speech recognition. Please refresh the page and try again.'
};

const friendlyErrorMessage = computed(() => {
  if (!recognitionError.value) return '';
  return errorMessageMap[recognitionError.value] || errorMessageMap['default'];
});

const statusText = computed(() => {
  if (recognitionError.value) {
    return 'Speech recognition ran into a problem';
  }
  return isListening.value ? 'Listening... please speak' : 'Click "Start Recording" to begin speaking';
});

const checkBrowserSupport = () => {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  isBrowserSupported.value = !!SpeechRecognition;
  if (!isBrowserSupported.value) {
    recognitionError.value = 'not-supported';
  }
  return isBrowserSupported.value;
};

const initRecognition = () => {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  recognition.value = new SpeechRecognition();
  recognition.value.continuous = true;       // keep listening across pauses
  recognition.value.interimResults = true;   // deliver partial results while speaking
  recognition.value.lang = props.lang;

  recognition.value.onstart = () => {
    isListening.value = true;
    recognitionError.value = null;
    console.log('Speech recognition started');
  };

  recognition.value.onend = () => {
    isListening.value = false;
    console.log('Speech recognition ended');
  };

  recognition.value.onresult = (event) => {
    let interimTranscript = '';
    let finalTranscript = '';
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const result = event.results[i];
      const text = result[0].transcript;
      if (result.isFinal) {
        finalTranscript += text;
      } else {
        interimTranscript += text;
      }
    }
    transcript.value = finalTranscript || interimTranscript;
    console.log('Recognition result:', transcript.value);
  };

  recognition.value.onerror = (event) => {
    recognitionError.value = event.error;
    isListening.value = false;
    console.error('Speech recognition error:', event.error);
    // Retry automatically after transient errors
    if (['network', 'service-not-allowed'].includes(event.error)) {
      setTimeout(() => {
        if (props.isOpen && !isListening.value) {
          startRecognition();
        }
      }, 1500);
    }
  };
};

// Probe microphone permission by briefly opening and closing an audio stream
const checkMicrophonePermission = async () => {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    stream.getTracks().forEach(track => track.stop());
    return true;
  } catch (error) {
    console.error('Microphone permission denied:', error);
    recognitionError.value = 'not-allowed';
    return false;
  }
};

const startRecognition = async () => {
  if (!recognition.value) {
    initRecognition();
  }
  const hasPermission = await checkMicrophonePermission();
  if (!hasPermission) return;
  transcript.value = '';
  recognitionError.value = null;
  try {
    recognition.value.start();
  } catch (error) {
    console.error('Failed to start speech recognition:', error);
    recognitionError.value = 'service-not-allowed';
  }
};

const stopRecognition = () => {
  if (!recognition.value) return;
  try {
    recognition.value.stop();
  } catch (error) {
    console.error('Failed to stop speech recognition:', error);
  }
};

const toggleRecognition = () => {
  if (!isBrowserSupported.value) return;
  if (isListening.value) {
    stopRecognition();
  } else {
    startRecognition();
  }
};

const closeModal = () => {
  if (isListening.value) {
    stopRecognition();
  }
  emit('close');
};

const confirmResult = () => {
  emit('confirm', transcript.value);
  closeModal();
};

const handleOverlayClick = (event) => {
  if (event.target === event.currentTarget) {
    closeModal();
  }
};

// Keep the recognizer's language in sync with the prop
watch(() => props.lang, (newLang) => {
  if (recognition.value) {
    recognition.value.lang = newLang;
  }
});

onMounted(() => {
  checkBrowserSupport();
  if (isBrowserSupported.value) {
    initRecognition();
  }
});

// Detach handlers and abort any active session before unmount
onBeforeUnmount(() => {
  if (recognition.value) {
    recognition.value.onend = null;
    recognition.value.onresult = null;
    recognition.value.onerror = null;
    if (isListening.value) {
      recognition.value.abort();
    }
  }
});
</script>

<style scoped>
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background-color: rgba(0, 0, 0, 0.5);
  display: flex;
  justify-content: center;
  align-items: center;
  z-index: 1000;
}

.modal-container {
  background-color: white;
  border-radius: 12px;
  box-shadow: 0 4px 20px rgba(0, 0, 0, 0.15);
  width: 90%;
  max-width: 500px;
  max-height: 90vh;
  display: flex;
  flex-direction: column;
  overflow: hidden;
}

.modal-header {
  padding: 16px 24px;
  border-bottom: 1px solid #eee;
  display: flex;
  justify-content: space-between;
  align-items: center;
}

.modal-header h2 {
  margin: 0;
  font-size: 1.25rem;
  color: #333;
}

.close-button {
  background: none;
  border: none;
  font-size: 1.5rem;
  cursor: pointer;
  color: #666;
  padding: 0;
  line-height: 1;
  outline: none;
}

.modal-body {
  padding: 24px;
  flex: 1;
  overflow-y: auto;
}

.status-indicator {
  display: flex;
  flex-direction: column;
  align-items: center;
  margin-bottom: 24px;
}

.mic-animation {
  display: flex;
  align-items: center;
  gap: 12px;
  margin-bottom: 8px;
}

.mic-icon svg,
.mic-ready svg {
  width: 36px;
  height: 36px;
  fill: #4a6cf7;
}

.sound-wave {
  display: flex;
  align-items: center;
  gap: 4px;
  height: 36px;
}

.wave {
  width: 6px;
  height: 16px;
  background-color: #4a6cf7;
  border-radius: 3px;
  animation: wave 1.2s infinite ease-in-out;
}

.wave:nth-child(1) {
  animation-delay: -0.6s;
}

.wave:nth-child(2) {
  animation-delay: -0.3s;
}

.wave:nth-child(3) {
  animation-delay: 0s;
}

@keyframes wave {
  0%,
  60%,
  100% {
    transform: scaleY(0.4);
  }
  30% {
    transform: scaleY(1);
  }
}

.mic-ready svg {
  opacity: 0.7;
}

.status-text {
  margin: 0;
  color: #666;
  font-size: 0.9rem;
  text-align: center;
}

.result-container {
  background-color: #f8f9fa;
  border-radius: 8px;
  padding: 16px;
  min-height: 120px;
}

.result-content {
  color: #666;
  font-size: 0.95rem;
  line-height: 1.5;
}

.result-content.has-result {
  color: #333;
}

.modal-footer {
  padding: 16px 24px;
  border-top: 1px solid #eee;
  display: flex;
  justify-content: flex-end;
  gap: 12px;
}

.control-button {
  padding: 8px 16px;
  background-color: #f0f2f5;
  border: none;
  outline: none;
  border-radius: 6px;
  color: #333;
  cursor: pointer;
  font-weight: 500;
  transition: all 0.2s;
}

.control-button.listening {
  background-color: #ffebee;
  color: #f44336;
}

.control-button:hover {
  background-color: #e4e6eb;
}

.confirm-button {
  padding: 8px 16px;
  background-color: #4a6cf7;
  border: none;
  outline: none;
  border-radius: 6px;
  color: white;
  cursor: pointer;
  font-weight: 500;
  transition: background-color 0.2s;
}

.confirm-button:hover {
  background-color: #3a5bd9;
}

.confirm-button:disabled {
  background-color: #cccccc;
  cursor: not-allowed;
}

.modal-fade-enter-active,
.modal-fade-leave-active {
  transition: opacity 0.3s ease;
}

.modal-fade-enter-from,
.modal-fade-leave-to {
  opacity: 0;
}

.error-message {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 8px;
  margin-top: 12px;
  padding: 8px 12px;
  background-color: #ffebee;
  border-radius: 6px;
  color: #d32f2f;
  font-size: 0.9rem;
}

.error-icon {
  width: 18px;
  height: 18px;
  fill: #d32f2f;
}

.control-button:disabled {
  background-color: #e0e0e0;
  color: #9e9e9e;
  cursor: not-allowed;
}

.status-text {
  font-weight: 500;
  color: #333;
  margin-bottom: 4px;
}
</style>
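Everything in the component ultimately rests on the browser's Web Speech API (SpeechRecognition, commonly exposed with the webkitSpeechRecognition prefix; support varies by browser, which is why the component feature-detects it). The framework-free sketch below shows the same core calls, recognizer configuration, feature detection, and result assembly, outside Vue; the element IDs (#start, #stop, #output) are placeholders assumed for this example only.

// Minimal sketch of the raw Web Speech API flow the component wraps.
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;

if (!SpeechRecognition) {
  console.warn('Speech recognition is not supported in this browser.');
} else {
  const recognition = new SpeechRecognition();
  recognition.lang = 'zh-CN';          // same default language as the component
  recognition.continuous = true;       // keep listening across pauses
  recognition.interimResults = true;   // deliver partial results while speaking

  recognition.onresult = (event) => {
    let finalText = '';
    let interimText = '';
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const alternative = event.results[i][0];
      if (event.results[i].isFinal) {
        finalText += alternative.transcript;
      } else {
        interimText += alternative.transcript;
      }
    }
    // Show final text once available, otherwise the live interim text
    document.querySelector('#output').textContent = finalText || interimText;
  };

  recognition.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
  };

  document.querySelector('#start').addEventListener('click', () => recognition.start());
  document.querySelector('#stop').addEventListener('click', () => recognition.stop());
}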