feat: Support autoplay audio tag

--story=1017623 --user=刘瑞斌 [Application] - When voice playback encounters an audio file, play the file content directly https://www.tapd.cn/57709429/s/1642154
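For context, the core of this change is splitting the rendered answer on embedded <audio> tags before playback, so audio files inside a reply are played directly instead of being read aloud. Below is a minimal TypeScript sketch of that splitting step, kept separate from the component code; the names splitAnswer and Segment are illustrative and not part of the commit. Because the split regex uses a capturing group, the matched <audio> tags are kept as their own entries in the resulting array.

// Illustrative sketch only, not the component's code.
const AUDIO_TAG = /(<audio[^>]*><\/audio>)/

type Segment = { kind: 'text'; value: string } | { kind: 'audio'; src: string }

function splitAnswer(text: string): Segment[] {
  return text
    .split(AUDIO_TAG) // the capturing group keeps each matched tag in the output
    .filter((part) => part.length > 0)
    .map((part): Segment =>
      part.includes('<audio')
        ? { kind: 'audio', src: part.match(/src="([^"]*)"/)?.[1] || '' }
        : { kind: 'text', value: part }
    )
}

// Example: one embedded recording between two spoken sentences.
// splitAnswer('Here it is <audio src="/files/a.mp3"></audio> as requested.')
// -> [ { kind: 'text', ... }, { kind: 'audio', src: '/files/a.mp3' }, { kind: 'text', ... } ]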
CaptainB 2025-01-06 15:43:24 +08:00 committed by 刘瑞斌
parent d9df013e33
commit 6b23fcd11c
2 changed files with 91 additions and 34 deletions

View File

@@ -76,7 +76,7 @@
</span>
</div>
<!-- Render first, otherwise it cannot play -->
<audio ref="audioPlayer" controls hidden="hidden"></audio>
<audio ref="audioPlayer" v-for="item in audioList" :key="item" controls hidden="hidden"></audio>
</template>
<script setup lang="ts">
import { onMounted, ref } from 'vue'
@@ -110,11 +110,13 @@ const props = withDefaults(
const emit = defineEmits(['update:data', 'regeneration'])
const audioPlayer = ref<HTMLAudioElement | null>(null)
const audioPlayer = ref<HTMLAudioElement[] | null>([])
const audioPlayerStatus = ref(false)
const buttonData = ref(props.data)
const loading = ref(false)
const utterance = ref<SpeechSynthesisUtterance | null>(null)
const audioList = ref<string[]>([])
const currentAudioIndex = ref(0)
function regeneration() {
emit('regeneration')
@@ -170,8 +172,29 @@ const playAnswerText = (text: string) => {
text = markdownToPlainText(text)
// console.log(text)
audioPlayerStatus.value = true
if (props.tts_type === 'BROWSER') {
if (text !== utterance.value?.text) {
// Split the text into plain-text segments and embedded <audio> tags
audioList.value = text.split(/(<audio[^>]*><\/audio>)/)
playAnswerTextPart()
}
const playAnswerTextPart = () => {
// console.log(audioList.value, currentAudioIndex.value)
if (currentAudioIndex.value === audioList.value.length) {
audioPlayerStatus.value = false
currentAudioIndex.value = 0
return
}
if (audioList.value[currentAudioIndex.value].includes('<audio')) {
if (audioPlayer.value) {
audioPlayer.value[currentAudioIndex.value].src = audioList.value[currentAudioIndex.value].match(/src="([^"]*)"/)?.[1] || ''
audioPlayer.value[currentAudioIndex.value].play() // start playback of the embedded audio file
audioPlayer.value[currentAudioIndex.value].onended = () => {
currentAudioIndex.value += 1
playAnswerTextPart()
}
}
} else if (props.tts_type === 'BROWSER') {
if (audioList.value[currentAudioIndex.value] !== utterance.value?.text) {
window.speechSynthesis.cancel()
}
if (window.speechSynthesis.paused) {
@@ -179,10 +202,11 @@ const playAnswerText = (text: string) => {
return
}
// Create a new SpeechSynthesisUtterance for the current segment
utterance.value = new SpeechSynthesisUtterance(text)
utterance.value = new SpeechSynthesisUtterance(audioList.value[currentAudioIndex.value])
utterance.value.onend = () => {
audioPlayerStatus.value = false
utterance.value = null
currentAudioIndex.value += 1
playAnswerTextPart()
}
utterance.value.onerror = () => {
audioPlayerStatus.value = false
@@ -190,15 +214,14 @@ const playAnswerText = (text: string) => {
}
// Speak the current segment
window.speechSynthesis.speak(utterance.value)
}
if (props.tts_type === 'TTS') {
} else if (props.tts_type === 'TTS') {
// If the audio is already loaded, just play it
if (audioPlayer.value?.src) {
audioPlayer.value?.play()
if (audioPlayer.value && audioPlayer.value[currentAudioIndex.value]?.src) {
audioPlayer.value[currentAudioIndex.value].play()
return
}
applicationApi
.postTextToSpeech((props.applicationId as string) || (id as string), { text: text }, loading)
.postTextToSpeech((props.applicationId as string) || (id as string), { text: audioList.value[currentAudioIndex.value] }, loading)
.then(async (res: any) => {
if (res.type === 'application/json') {
const text = await res.text()
@@ -219,11 +242,12 @@ const playAnswerText = (text: string) => {
// link.click()
// Make sure audioPlayer is a rendered DOM audio element before assigning the source
if (audioPlayer.value instanceof HTMLAudioElement) {
audioPlayer.value.src = url
audioPlayer.value.play() // start playback
audioPlayer.value.onended = () => {
audioPlayerStatus.value = false
if (audioPlayer.value) {
audioPlayer.value[currentAudioIndex.value].src = url
audioPlayer.value[currentAudioIndex.value].play() // start playback
audioPlayer.value[currentAudioIndex.value].onended = () => {
currentAudioIndex.value += 1
playAnswerTextPart()
}
} else {
console.error('audioPlayer.value is not an instance of HTMLAudioElement')
@@ -238,7 +262,11 @@ const playAnswerText = (text: string) => {
const pausePlayAnswerText = () => {
audioPlayerStatus.value = false
if (props.tts_type === 'TTS') {
audioPlayer.value?.pause()
if (audioPlayer.value) {
audioPlayer.value?.forEach((item) => {
item.pause()
})
}
}
if (props.tts_type === 'BROWSER') {
window.speechSynthesis.pause()
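A note on the template change above: when ref="audioPlayer" sits on an element rendered with v-for, Vue 3 collects the rendered elements into an array after mount, which is why the ref's type changes from a single HTMLAudioElement to HTMLAudioElement[]. A minimal standalone sketch of that behavior, with illustrative data; it is not part of the commit:

<template>
  <!-- One hidden <audio> element is rendered per segment. -->
  <audio v-for="item in audioList" :key="item" ref="audioPlayer" controls hidden="hidden"></audio>
</template>

<script setup lang="ts">
import { ref } from 'vue'

const audioList = ref<string[]>(['intro', '<audio src="/files/a.mp3"></audio>', 'outro'])
// Refs used inside v-for are populated as an array of elements after mount.
// Note that Vue does not guarantee the array order matches the source array
// before v3.5, so index-based access assumes the two orders line up.
const audioPlayer = ref<HTMLAudioElement[]>([])
</script>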

View File

@@ -54,7 +54,7 @@
<EditContentDialog ref="EditContentDialogRef" @refresh="refreshContent" />
<EditMarkDialog ref="EditMarkDialogRef" @refresh="refreshMark" />
<!-- Render first, otherwise it cannot play -->
<audio ref="audioPlayer" controls hidden="hidden"></audio>
<audio ref="audioPlayer" v-for="item in audioList" :key="item" controls hidden="hidden"></audio>
</div>
</div>
</template>
@@ -88,7 +88,7 @@ const props = defineProps({
const emit = defineEmits(['update:data'])
const audioPlayer = ref<HTMLAudioElement | null>(null)
const audioPlayer = ref<HTMLAudioElement[] | null>(null)
const EditContentDialogRef = ref()
const EditMarkDialogRef = ref()
@@ -96,6 +96,8 @@ const EditMarkDialogRef = ref()
const buttonData = ref(props.data)
const loading = ref(false)
const utterance = ref<SpeechSynthesisUtterance | null>(null)
const audioList = ref<string[]>([])
const currentAudioIndex = ref(0)
function editContent(data: any) {
EditContentDialogRef.value.open(data)
@@ -149,8 +151,29 @@ const playAnswerText = (text: string) => {
text = markdownToPlainText(text)
// console.log(text)
audioPlayerStatus.value = true
if (props.tts_type === 'BROWSER') {
if (text !== utterance.value?.text) {
// Split the text into plain-text segments and embedded <audio> tags
audioList.value = text.split(/(<audio[^>]*><\/audio>)/)
playAnswerTextPart()
}
const playAnswerTextPart = () => {
// console.log(audioList.value, currentAudioIndex.value)
if (currentAudioIndex.value === audioList.value.length) {
audioPlayerStatus.value = false
currentAudioIndex.value = 0
return
}
if (audioList.value[currentAudioIndex.value].includes('<audio')) {
if (audioPlayer.value) {
audioPlayer.value[currentAudioIndex.value].src = audioList.value[currentAudioIndex.value].match(/src="([^"]*)"/)?.[1] || ''
audioPlayer.value[currentAudioIndex.value].play() // start playback of the embedded audio file
audioPlayer.value[currentAudioIndex.value].onended = () => {
currentAudioIndex.value += 1
playAnswerTextPart()
}
}
} else if (props.tts_type === 'BROWSER') {
if (audioList.value[currentAudioIndex.value] !== utterance.value?.text) {
window.speechSynthesis.cancel()
}
if (window.speechSynthesis.paused) {
@@ -158,10 +181,11 @@ const playAnswerText = (text: string) => {
return
}
// Create a new SpeechSynthesisUtterance for the current segment
utterance.value = new SpeechSynthesisUtterance(text)
utterance.value = new SpeechSynthesisUtterance(audioList.value[currentAudioIndex.value])
utterance.value.onend = () => {
audioPlayerStatus.value = false
utterance.value = null
currentAudioIndex.value += 1
playAnswerTextPart()
}
utterance.value.onerror = () => {
audioPlayerStatus.value = false
@@ -169,15 +193,14 @@ const playAnswerText = (text: string) => {
}
// Speak the current segment
window.speechSynthesis.speak(utterance.value)
}
if (props.tts_type === 'TTS') {
} else if (props.tts_type === 'TTS') {
// If the audio is already loaded, just play it
if (audioPlayer.value?.src) {
audioPlayer.value?.play()
if (audioPlayer.value && audioPlayer.value[currentAudioIndex.value]?.src) {
audioPlayer.value[currentAudioIndex.value].play()
return
}
applicationApi
.postTextToSpeech(id || (props.applicationId as string), { text: text }, loading)
.postTextToSpeech((props.applicationId as string) || (id as string), { text: audioList.value[currentAudioIndex.value] }, loading)
.then(async (res: any) => {
if (res.type === 'application/json') {
const text = await res.text()
@@ -198,11 +221,12 @@ const playAnswerText = (text: string) => {
// link.click()
// Make sure audioPlayer is a rendered DOM audio element before assigning the source
if (audioPlayer.value instanceof HTMLAudioElement) {
audioPlayer.value.src = url
audioPlayer.value.play() // start playback
audioPlayer.value.onended = () => {
audioPlayerStatus.value = false
if (audioPlayer.value) {
audioPlayer.value[currentAudioIndex.value].src = url
audioPlayer.value[currentAudioIndex.value].play() // start playback
audioPlayer.value[currentAudioIndex.value].onended = () => {
currentAudioIndex.value += 1
playAnswerTextPart()
}
} else {
console.error('audioPlayer.value is not an instance of HTMLAudioElement')
@@ -217,13 +241,18 @@ const playAnswerText = (text: string) => {
const pausePlayAnswerText = () => {
audioPlayerStatus.value = false
if (props.tts_type === 'TTS') {
audioPlayer.value?.pause()
if (audioPlayer.value) {
audioPlayer.value?.forEach((item) => {
item.pause()
})
}
}
if (props.tts_type === 'BROWSER') {
window.speechSynthesis.pause()
}
}
function refreshMark() {
buttonData.value.improve_paragraph_id_list = []
emit('update:data', buttonData.value)
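Across both files the segments are chained the same way: whichever branch handles the current segment (embedded audio element, browser SpeechSynthesis, or server-side TTS) advances currentAudioIndex in its completion callback and calls playAnswerTextPart() again, until the index reaches the end of audioList and the playing flag resets. A condensed standalone sketch of that pattern follows; playSegments and playOne are hypothetical names, not part of the commit.

// Each segment schedules the next one from its completion callback,
// so the segments play strictly one after another.
function playSegments(
  segments: string[],
  playOne: (segment: string, done: () => void) => void,
  onAllFinished: () => void
) {
  let index = 0
  const next = () => {
    if (index === segments.length) {
      onAllFinished() // e.g. clear the playing flag and reset the index
      return
    }
    playOne(segments[index], () => {
      index += 1
      next()
    })
  }
  next()
}

// Usage idea: playOne would branch the way playAnswerTextPart does, setting
// src and calling play() for '<audio ...>' segments, otherwise speaking the
// text, and invoking done() from the onended / onend handler.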