<!-- lanecarford_front/src/views/stylist/dressfor.vue -->
<template>
  <header-title style-type="3" />
  <div class="dressfor-container flex">
    <div class="content flex-1 flex flex-column">
      <!-- <div class="setting flex flex-between">
        <van-icon name="arrow-left" color="#fff" @click="handleBack" />
        <SvgIcon name="setting" size="70" />
      </div> -->
      <div class="text">What are you dressing for?</div>
      <!-- <div class="start-btn" @click="handleStart">Start</div> -->
      <div class="chatbox flex flex-center">
        <div class="input-box flex">
          <div class="input-wrapper flex-1 flex">
            <input
              type="text"
              class="input-item flex-1"
              v-model="inputValue"
              placeholder="Ask something!"
              v-show="!isRecording"
            />
            <div class="recording-visualizer flex-1" v-show="isRecording">
              <AudioVisualizer ref="audioVisualizerRef" />
            </div>
          </div>
          <SvgIcon
            class="audio-icon"
            :name="isRecording ? 'pause' : 'audio'"
            size="35"
            color="#6D6868"
            @click="handleClickAudio"
          />
        </div>
        <div class="send flex flex-center" @click="handleSendMessage">
          <SvgIcon class="send-icon" name="send" size="26" color="#000000" />
        </div>
      </div>
    </div>
  </div>
  <footer-navigation />
</template>
<script setup lang="ts">
import { ref, onUnmounted, nextTick, watch } from 'vue'
import { showToast } from 'vant'
import HeaderTitle from '@/components/HeaderTitle.vue'
import FooterNavigation from '@/components/FooterNavigation.vue'
import { useRouter } from 'vue-router'
import AudioVisualizer from '@/views/asistant/components/AudioVisualizer.vue'
const router = useRouter()
const inputValue = ref('')
const isRecording = ref(false)
const audioVisualizerRef = ref<InstanceType<typeof AudioVisualizer> | null>(null)
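// Web Speech API state, kept outside Vue reactivity: the recognizer instance,
// the last committed final transcript, and a guard against double start()/stop()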
let speechRecognition: any = null
let lastTranscript = ''
let isSpeechRecognitionActive = false
const refreshAudioVisualizer = () => {
  audioVisualizerRef.value?.updateLines?.()
}
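// When recording starts, the visualizer becomes visible via v-show; redraw it
// after the DOM updates, and once more shortly after as a layout-timing safeguard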
watch(isRecording, async (newVal) => {
  if (newVal) {
    await nextTick()
    refreshAudioVisualizer()
    setTimeout(() => {
      refreshAudioVisualizer()
    }, 50)
  }
})
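// Validate the typed or transcribed message, then hand it to the assistant view
// through the route query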
const handleSendMessage = () => {
  const message = inputValue.value.trim()
  if (!message) {
    showToast('Please enter a message')
    return
  }
  router.push({
    path: '/asistant',
    query: { message }
  })
}
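// Toggle the microphone: stop if a recording is in progress, otherwise start one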
const handleClickAudio = () => {
  if (isRecording.value) {
    stopRecording()
  } else {
    startRecording()
  }
}
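// Start speech recognition, creating the recognizer on first use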
const startRecording = () => {
  if (isSpeechRecognitionActive) {
    console.warn('Speech recognition already running')
    return
  }
  // Lazily create the recognizer, feature-detecting the prefixed WebKit variant
  if (!speechRecognition) {
    if (!('webkitSpeechRecognition' in window) && !('SpeechRecognition' in window)) {
      showToast('Your browser does not support speech recognition. Please try another browser.')
      return
    }
    const SpeechRecognition =
      (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition
    speechRecognition = new SpeechRecognition()
    speechRecognition.continuous = true
    speechRecognition.interimResults = true
    speechRecognition.lang = 'en-US'
  }
  speechRecognition.onstart = () => {
    isRecording.value = true
  }
  speechRecognition.onresult = (event: any) => {
    // Separate final results from interim (still-being-recognized) ones
    let finalTranscript = ''
    let interimTranscript = ''
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const transcript = event.results[i][0].transcript
      if (event.results[i].isFinal) {
        finalTranscript += transcript
      } else {
        interimTranscript += transcript
      }
    }
    // Only commit a final transcript once, so repeated events don't overwrite the input
    if (finalTranscript && finalTranscript !== lastTranscript) {
      lastTranscript = finalTranscript
      inputValue.value = finalTranscript
    }
    if (interimTranscript) {
      console.log('Speech recognition interim result:', interimTranscript)
    }
  }
  speechRecognition.onend = () => {
    // Fires both after stop() and when the browser ends recognition on its own
    isRecording.value = false
    lastTranscript = ''
    isSpeechRecognitionActive = false
  }
  speechRecognition.onerror = (event: any) => {
    console.error('Speech recognition error:', event.error)
    isRecording.value = false
    isSpeechRecognitionActive = false
    showToast('Speech recognition failed, please try again')
  }
  speechRecognition.start()
  isSpeechRecognitionActive = true
}
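// Stop an active recognition session; the onend handler resets the recording UI state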
const stopRecording = () => {
  if (speechRecognition && isSpeechRecognitionActive) {
    speechRecognition.stop()
    isSpeechRecognitionActive = false
  }
}
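// Clean up on navigation away so the microphone is released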
onUnmounted(() => {
  if (speechRecognition && isSpeechRecognitionActive) {
    speechRecognition.stop()
  }
  speechRecognition = null
})
</script>
<style lang="less" scoped>
.c-svg {
  width: initial;
  height: initial;
}
.dressfor-container {
  height: calc(100vh - 12rem - 14.9rem);
  overflow: hidden;
  color: #fff;
  position: relative;
  background: url('@/assets/images/dress_for_bg.png') no-repeat center center;
  background-size: cover;
  padding: 6rem 0 0 0;
  .content {
    .setting {
      padding: 0 4.9rem 0 8.4rem;
      font-size: 7rem;
      .c-svg {
        width: initial;
      }
    }
    .text {
      font-family: 'satoshiBold';
      font-size: 11rem;
      line-height: 106%;
      text-align: center;
      margin-top: 43.8rem;
      margin-bottom: 14rem;
    }
    .chatbox {
      height: 9.3rem;
      // background-color: #fff;
      column-gap: 2.29rem;
      .input-box {
        width: 59.8rem;
        height: 100%;
        background-color: #fff;
        // border: 2px solid #5f5f5f;
        border-radius: 1rem;
        color: #222222;
        font-size: 3.2rem;
        font-family: 'satoshiRegular';
        padding: 0 2.6rem;
        column-gap: 2.6rem;
        overflow: hidden;
        .input-wrapper {
          overflow: hidden;
        }
        .recording-visualizer {
          display: flex;
          align-items: center;
          height: 100%;
          :deep(.audio-visualizer) {
            width: 100%;
            padding: 0;
          }
          :deep(.visualizer-container) {
            height: 100%;
          }
        }
        .input-item {
          // width: 100%;
          height: 100%;
          outline: none;
          border: none;
        }
        .audio-icon {
          width: initial;
        }
      }
      .send {
        width: 7.6rem;
        height: 7.6rem;
        background-color: #fff;
        border-radius: 1rem;
      }
    }
  }
}
</style>