street-lingo/apps/indonesian-app/src/components/SpeechInterface.vue

<template>
<div class="speech-interface">
<div class="conversation-area">
<div class="messages" ref="messagesContainer">
<div
v-for="message in messages"
:key="message.id"
:class="['message', message.type]"
>
<div class="message-content">
<p>{{ message.text }}</p>
<div v-if="message.type === 'ai'" class="translation-section">
<button
v-if="!message.showTranslation"
@click="requestTranslation(message)"
:disabled="message.loadingTranslation"
class="translation-toggle"
>
{{ message.loadingTranslation ? 'Translating...' : '🌍 Show English' }}
</button>
<div v-if="message.showTranslation" class="translation-content">
<p class="translation-text">{{ message.englishTranslation }}</p>
<button @click="hideTranslation(message)" class="translation-toggle hide">
✕ Hide English
</button>
</div>
</div>
<div v-if="message.audio" class="audio-player">
<audio :src="message.audio" controls autoplay></audio>
</div>
</div>
<span class="timestamp">{{ formatTime(message.timestamp) }}</span>
</div>
</div>
<div class="transcription-display" v-if="currentTranscription">
<p class="transcription-text">{{ currentTranscription }}</p>
<span class="transcription-status">{{ getTranscriptionStatus() }}</span>
</div>
<!-- Recording Stopped Notification -->
<div v-if="showRecordingStoppedNotification" class="recording-stopped-notification">
<p>🛑 Recording stopped. Click "Speak" to continue.</p>
</div>
<!-- Inline Suggestion Panel -->
<div v-if="showSuggestionPopup" class="suggestion-panel">
<div class="suggestion-panel-header">
<h4>💡 Help Suggestions</h4>
<button @click="closeSuggestionPopup" class="suggestion-close-btn">✕</button>
</div>
<div class="suggestion-panel-content" v-if="suggestionContent">
<p class="suggestion-panel-intro">{{ suggestionContent.intro }}</p>
<div class="suggestion-panel-list">
<div
v-for="(suggestion, index) in suggestionContent.suggestions"
:key="index"
class="suggestion-panel-item"
>
<div class="suggestion-panel-text">
<strong>{{ suggestion.indonesian_text }}</strong>
</div>
<div class="suggestion-panel-translation">
{{ suggestion.english_meaning }}
</div>
</div>
</div>
</div>
</div>
</div>
<div class="controls">
<button
@click="toggleRecording"
:class="['record-btn', { recording: isRecording }]"
:disabled="isConnecting || isFinished"
>
{{ isRecording ? '🛑 Stop' : '🎤 Speak' }}
</button>
<button
v-if="isRecording"
@click="forceStopRecording"
class="force-stop-btn"
title="Force stop"
:disabled="isFinished"
>
⏹️
</button>
<div class="suggestion-toggle-container">
<label class="suggestion-toggle-label" for="suggestion-toggle">
💡 Help
</label>
<input
id="suggestion-toggle"
type="checkbox"
v-model="suggestionsEnabled"
class="suggestion-toggle"
/>
</div>
<input
v-model="textInput"
@keyup.enter="sendTextMessage"
placeholder="Type in Indonesian..."
class="text-input"
:disabled="isRecording || isFinished"
/>
<button
@click="sendTextMessage"
:disabled="!textInput.trim() || isRecording || isFinished"
class="send-btn"
>
Send
</button>
<button
@click="resetConversation"
class="reset-btn"
title="Start new conversation"
>
🔄 Reset
</button>
<button
@click="finishConversation"
class="finish-btn"
title="End conversation and get assessment"
:disabled="messages.length === 0 || isFinished"
>
✓ Finish Conversation
</button>
</div>
<div class="goal-section" v-if="goalStatus">
<h3 class="goal-title">
🎯 Goal: {{ goalStatus.scenario_goal }}
</h3>
<div class="goal-checklist">
<div
v-for="item in goalStatus.goal_items"
:key="item.id"
:class="['goal-item', { completed: item.completed }]"
>
<span class="goal-checkbox">
{{ item.completed ? '✅' : '⏳' }}
</span>
<span class="goal-description">{{ item.description }}</span>
</div>
</div>
<div v-if="conversationComplete" class="completion-banner">
🎉 All goals completed! Well done!
</div>
</div>
<!-- Conversation Feedback -->
<div class="feedback-section" v-if="conversationFeedback">
<h3 class="feedback-title">
🎉 Great conversation! Here's some helpful feedback:
</h3>
<div class="feedback-content">
<div class="feedback-summary">
<p class="feedback-encouragement">{{ conversationFeedback.encouragement }}</p>
</div>
<div class="feedback-suggestions" v-if="conversationFeedback.suggestions && conversationFeedback.suggestions.length > 0">
<h4>💡 Tips to sound even more natural:</h4>
<div
v-for="(suggestion, index) in conversationFeedback.suggestions"
:key="index"
class="feedback-suggestion"
>
<div class="suggestion-title">{{ suggestion.category }}</div>
<div class="suggestion-text">{{ suggestion.tip }}</div>
</div>
</div>
<div class="feedback-examples" v-if="conversationFeedback.examples && conversationFeedback.examples.length > 0">
<h4>🗣️ Alternative ways to express yourself:</h4>
<div
v-for="(example, index) in conversationFeedback.examples"
:key="index"
class="feedback-example"
>
<div class="example-original">
<strong>You said:</strong> "{{ example.original }}"
</div>
<div class="example-improved">
<strong>You could also say:</strong> "{{ example.improved }}"
</div>
<div class="example-reason" v-if="example.reason">
<em>{{ example.reason }}</em>
</div>
</div>
</div>
<div class="feedback-encouragement-final">
<p>Keep practicing - you're doing great! 🌟</p>
</div>
</div>
</div>
<div class="status" v-if="connectionStatus">
<span :class="['status-indicator', connectionStatus]">
{{ getStatusMessage() }}
</span>
</div>
</div>
</template>
<script>
export default {
name: 'SpeechInterface',
inject: ['updateConversationProgress'],
props: {
scenario: {
type: String,
default: ''
}
},
data() {
return {
messages: [],
currentTranscription: '',
isTranscriptionFinal: false,
isRecording: false,
isConnecting: false,
textInput: '',
connectionStatus: 'disconnected',
websocket: null,
mediaRecorder: null,
audioStream: null,
audioContext: null,
messageId: 0,
wsBaseUrl: import.meta.env.VITE_WS_BASE_URL || 'ws://localhost:8000',
goalStatus: null,
conversationComplete: false,
scenarioData: null,
isAutoListening: false,
suggestionsEnabled: false,
showSuggestionPopup: false,
suggestionContent: null,
pauseTimer: null,
lastInteractionTime: null,
silenceTimer: null,
lastVoiceActivityTime: null,
showRecordingStoppedNotification: false,
lastAIResponseTime: null,
isFinished: false,
conversationFeedback: null
}
},
mounted() {
this.connectWebSocket()
this.loadScenarioData()
},
beforeUnmount() {
this.disconnect()
this.clearPauseTimer()
},
watch: {
scenario: {
handler(newScenario, oldScenario) {
if (oldScenario && newScenario !== oldScenario) {
// Reset conversation when scenario changes
this.resetConversationOnScenarioChange()
} else if (newScenario && !oldScenario) {
// Initial scenario load - request greeting if connected
console.log('Scenario initially set to:', newScenario)
if (this.connectionStatus === 'connected') {
this.requestInitialGreeting()
}
}
}
},
messages: {
handler(newMessages) {
// Update parent about conversation progress
this.updateConversationProgress(newMessages.length > 0)
},
deep: true
}
},
methods: {
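// Open the realtime speech WebSocket and track connection state; once connected, ask the character for its opening line.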
connectWebSocket() {
this.isConnecting = true
this.connectionStatus = 'connecting'
try {
this.websocket = new WebSocket(`${this.wsBaseUrl}/ws/speech/indonesian`)
this.websocket.onopen = () => {
this.connectionStatus = 'connected'
this.isConnecting = false
console.log('WebSocket connected')
// Request initial greeting from character
this.requestInitialGreeting()
}
this.websocket.onmessage = (event) => {
const data = JSON.parse(event.data)
this.handleWebSocketMessage(data)
}
this.websocket.onclose = () => {
this.connectionStatus = 'disconnected'
this.isConnecting = false
console.log('WebSocket disconnected')
}
this.websocket.onerror = (error) => {
this.connectionStatus = 'error'
this.isConnecting = false
console.error('WebSocket error:', error)
}
} catch (error) {
this.connectionStatus = 'error'
this.isConnecting = false
console.error('Failed to connect:', error)
}
},
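// Dispatch incoming server events: live transcriptions, AI replies (text plus base64 TTS audio), recording timeouts, and errors.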
handleWebSocketMessage(data) {
if (data.type === 'transcription') {
this.currentTranscription = data.transcript
this.isTranscriptionFinal = data.is_final
if (data.is_final) {
// Only add message if there's actual content
if (data.transcript.trim()) {
this.addMessage('user', data.transcript)
this.lastInteractionTime = Date.now()
this.clearPauseTimer()
this.startPauseDetection()
}
this.currentTranscription = ''
}
} else if (data.type === 'ai_response') {
this.addMessage('ai', data.text, data.audio)
// Update goal status
if (data.goal_status) {
this.goalStatus = data.goal_status
}
// Check if conversation is complete
if (data.conversation_complete) {
this.conversationComplete = true
this.showConversationComplete()
}
this.lastInteractionTime = Date.now()
this.lastAIResponseTime = Date.now()
// Start pause detection after AI response
this.startPauseDetection()
} else if (data.type === 'recording_timeout') {
// Only show notification if AI is not currently speaking AND user was actively speaking recently
const timeSinceAIResponse = this.lastAIResponseTime ? Date.now() - this.lastAIResponseTime : Infinity
const timeSinceLastVoice = this.lastVoiceActivityTime ? Date.now() - this.lastVoiceActivityTime : Infinity
// Only show the notification when the last AI response was more than 15s ago,
// the user has spoken within the last 30s, and the last voice activity was
// more than 5s ago (a genuine pause rather than a brief hesitation)
if (timeSinceAIResponse > 15000 &&
timeSinceLastVoice < 30000 &&
timeSinceLastVoice > 5000) {
this.showRecordingStoppedNotification = true
setTimeout(() => {
this.showRecordingStoppedNotification = false
}, 5000) // Hide after 5 seconds
}
} else if (data.type === 'error') {
this.addMessage('error', data.message)
}
},
async toggleRecording() {
if (this.isRecording) {
this.stopRecording()
} else {
await this.startRecording()
}
},
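// Ask for microphone access, stream 250ms webm/opus chunks via MediaRecorder, and tell the backend that recording has started.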
async startRecording() {
if (this.connectionStatus !== 'connected') {
alert('Please wait for connection to establish')
return
}
try {
this.audioStream = await navigator.mediaDevices.getUserMedia({
audio: {
sampleRate: 48000,
channelCount: 1,
echoCancellation: true,
noiseSuppression: true
}
})
// Initialize audio context for voice activity detection
if (!this.audioContext) {
this.audioContext = new (window.AudioContext || window.webkitAudioContext)()
}
this.mediaRecorder = new MediaRecorder(this.audioStream, {
mimeType: 'audio/webm;codecs=opus'
})
this.mediaRecorder.ondataavailable = (event) => {
if (event.data.size > 0) {
this.sendAudioChunk(event.data)
}
}
this.mediaRecorder.start(250) // Send chunks every 250ms
this.isRecording = true
this.currentTranscription = ''
this.isTranscriptionFinal = false
// Send audio_start message
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const startMessage = {
type: 'audio_start',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(startMessage))
}
} catch (error) {
console.error('Error starting recording:', error)
alert('Error accessing microphone: ' + error.message)
}
},
stopRecording() {
if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
this.mediaRecorder.stop()
}
if (this.audioStream) {
this.audioStream.getTracks().forEach(track => track.stop())
}
// Send audio_end message to trigger final processing
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const endMessage = {
type: 'audio_end',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(endMessage))
}
this.isRecording = false
this.isAutoListening = false
this.currentTranscription = 'Processing...'
this.isTranscriptionFinal = false
},
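// Base64-encode a recorded chunk and forward it to the backend as an audio_chunk message.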
async sendAudioChunk(audioBlob) {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const arrayBuffer = await audioBlob.arrayBuffer()
// Build the binary string incrementally; spreading a large Uint8Array into
// String.fromCharCode can exceed the engine's argument limit
const bytes = new Uint8Array(arrayBuffer)
let binary = ''
for (let i = 0; i < bytes.length; i++) {
binary += String.fromCharCode(bytes[i])
}
const base64Audio = btoa(binary)
const message = {
type: 'audio_chunk',
audio: base64Audio,
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(message))
}
},
sendTextMessage() {
if (!this.textInput.trim() || this.connectionStatus !== 'connected') return
const message = {
type: 'text_message',
text: this.textInput,
scenario_context: this.scenario
}
this.addMessage('user', this.textInput)
this.websocket.send(JSON.stringify(message))
this.textInput = ''
this.lastInteractionTime = Date.now()
this.clearPauseTimer()
this.startPauseDetection()
},
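// Append a chat message (with per-message translation state); AI replies that carry audio also arm auto-recording when playback ends.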
addMessage(type, text, audio = null) {
const message = {
id: this.messageId++,
type,
text,
audio: audio ? `data:audio/mp3;base64,${audio}` : null,
timestamp: new Date(),
// Translation properties for AI messages
showTranslation: false,
loadingTranslation: false,
englishTranslation: null
}
this.messages.push(message)
this.$nextTick(() => {
this.scrollToBottom()
// Auto-start recording after AI audio finishes playing
if (type === 'ai' && audio) {
this.setupAutoRecording(message)
}
})
},
scrollToBottom() {
const container = this.$refs.messagesContainer
if (container) {
container.scrollTop = container.scrollHeight
}
},
formatTime(timestamp) {
return timestamp.toLocaleTimeString('en-US', {
hour12: false,
hour: '2-digit',
minute: '2-digit'
})
},
getStatusMessage() {
const statusMessages = {
connecting: 'Connecting...',
connected: 'Connected',
disconnected: 'Disconnected',
error: 'Connection Error'
}
return statusMessages[this.connectionStatus] || 'Unknown'
},
resetConversation() {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const resetMessage = {
type: 'conversation_reset'
}
this.websocket.send(JSON.stringify(resetMessage))
}
// Clear frontend messages
this.messages = []
this.currentTranscription = ''
this.isTranscriptionFinal = false
this.isFinished = false
this.conversationFeedback = null
// Reset goals
this.resetGoals()
},
disconnect() {
this.stopRecording()
if (this.websocket) {
this.websocket.close()
}
},
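// Fetch scenario metadata over HTTP (derived from the WebSocket base URL) and build the goal checklist.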
async loadScenarioData() {
try {
const response = await fetch(`${this.wsBaseUrl.replace('ws', 'http')}/api/scenarios/indonesian`)
const scenarios = await response.json()
this.scenarioData = scenarios[this.scenario]
if (this.scenarioData) {
this.goalStatus = {
scenario_goal: this.scenarioData.goal,
goal_items: this.scenarioData.goal_items.map(item => ({
...item,
completed: false
})),
all_completed: false
}
}
} catch (error) {
console.error('Failed to load scenario data:', error)
}
},
showConversationComplete() {
// Add a completion message
this.addMessage('system', '🎉 Congratulations! You have completed all goals for this scenario!')
// Auto-scroll to show completion
setTimeout(() => {
this.scrollToBottom()
}, 100)
},
resetGoals() {
if (this.scenarioData) {
this.goalStatus = {
scenario_goal: this.scenarioData.goal,
goal_items: this.scenarioData.goal_items.map(item => ({
...item,
completed: false
})),
all_completed: false
}
}
this.conversationComplete = false
this.isFinished = false
this.conversationFeedback = null
},
resetConversationOnScenarioChange() {
// Load new scenario data
this.loadScenarioData()
// Reset conversation state
this.messages = []
this.currentTranscription = ''
this.isTranscriptionFinal = false
this.conversationComplete = false
this.isFinished = false
this.conversationFeedback = null
// Send reset message to backend if connected
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const resetMessage = {
type: 'conversation_reset'
}
this.websocket.send(JSON.stringify(resetMessage))
// Request initial greeting for new scenario
this.requestInitialGreeting()
}
},
async requestTranslation(message) {
// Set loading state
message.loadingTranslation = true
try {
const response = await fetch(`${this.wsBaseUrl.replace('ws', 'http')}/api/translate`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
text: message.text,
source_language: 'id',
target_language: 'en'
})
})
if (!response.ok) {
throw new Error('Translation failed')
}
const data = await response.json()
message.englishTranslation = data.translation
message.showTranslation = true
} catch (error) {
console.error('Translation error:', error)
message.englishTranslation = 'Translation failed. Please try again.'
message.showTranslation = true
} finally {
message.loadingTranslation = false
}
},
hideTranslation(message) {
message.showTranslation = false
},
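// Ask the backend for the character's opening line, retrying briefly until the scenario prop is set.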
requestInitialGreeting() {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
// Add a small delay to ensure scenario is properly set
setTimeout(() => {
if (this.scenario) {
const greetingMessage = {
type: 'initial_greeting',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(greetingMessage))
console.log('Sent initial greeting for scenario:', this.scenario)
} else {
console.log('No scenario set, retrying in 200ms')
// Retry if scenario not set yet
setTimeout(() => this.requestInitialGreeting(), 200)
}
}, 100)
}
},
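// When the AI's TTS audio finishes playing, auto-resume listening and arm a long-pause timer for help suggestions.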
setupAutoRecording(message) {
// Wait for the audio element to be created in the DOM
this.$nextTick(() => {
// Find the audio element for this message
const audioElements = this.$refs.messagesContainer.querySelectorAll('audio')
const latestAudio = audioElements[audioElements.length - 1]
if (latestAudio) {
latestAudio.addEventListener('ended', () => {
// Small delay before starting recording to feel natural
setTimeout(() => {
if (!this.isRecording && !this.conversationComplete) {
this.startAutoRecording()
}
console.log('Audio ended, starting pause detection')
// Also start pause detection for suggestions
this.startPauseDetection()
}, 500)
// Also start a longer timer for suggestions that works regardless of recording state
setTimeout(() => {
if (this.suggestionsEnabled && !this.showSuggestionPopup && !this.conversationComplete) {
console.log('Long pause detected, showing suggestions')
this.showSuggestions()
}
}, 15000) // 15 seconds after audio ends
})
}
})
},
async startAutoRecording() {
try {
this.isAutoListening = true
await this.startRecording()
// Start voice activity detection for auto-stop
this.startVoiceActivityDetection()
} catch (error) {
console.error('Auto-recording failed:', error)
this.isAutoListening = false
}
},
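// Lightweight voice-activity detection: poll an AnalyserNode's average volume each animation frame and auto-stop after ~3s of silence following speech.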
startVoiceActivityDetection() {
let silenceTimer = null
let voiceDetected = false
const silenceThreshold = 3000 // 3 seconds of silence (increased to prevent premature stopping)
// Create analyzer for voice activity detection
if (this.audioStream && this.audioContext) {
const source = this.audioContext.createMediaStreamSource(this.audioStream)
const analyzer = this.audioContext.createAnalyser()
analyzer.fftSize = 256
source.connect(analyzer)
const bufferLength = analyzer.frequencyBinCount
const dataArray = new Uint8Array(bufferLength)
const checkVoiceActivity = () => {
if (!this.isRecording) return
analyzer.getByteFrequencyData(dataArray)
// Calculate average volume
const average = dataArray.reduce((a, b) => a + b) / bufferLength
const volumeThreshold = 15 // Lowered threshold to be more sensitive to speech
if (average > volumeThreshold) {
// Voice detected
voiceDetected = true
this.lastVoiceActivityTime = Date.now()
this.clearPauseTimer() // Clear suggestion timer when user speaks
// Don't auto-close suggestions when user starts speaking
// User can manually close them if needed
// if (this.showSuggestionPopup) {
// this.closeSuggestionPopup()
// }
if (silenceTimer) {
clearTimeout(silenceTimer)
silenceTimer = null
}
} else if (voiceDetected) {
// Silence detected after voice
if (!silenceTimer) {
silenceTimer = setTimeout(() => {
if (this.isRecording) {
this.stopRecording()
// Show notification when recording stops due to long pause
this.showRecordingStoppedNotification = true
setTimeout(() => {
this.showRecordingStoppedNotification = false
}, 5000) // Hide after 5 seconds
}
}, silenceThreshold)
}
}
if (this.isRecording) {
requestAnimationFrame(checkVoiceActivity)
}
}
checkVoiceActivity()
}
},
getTranscriptionStatus() {
if (this.isTranscriptionFinal) {
return 'Final'
} else if (this.isAutoListening) {
return 'Auto-listening...'
} else {
return 'Listening...'
}
},
forceStopRecording() {
console.log('Force stopping recording')
this.stopRecording()
},
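// Arm a 15-second inactivity timer that surfaces help suggestions while the conversation is active and Help is enabled.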
startPauseDetection() {
if (!this.suggestionsEnabled) {
console.log('Suggestions disabled, not starting pause detection')
return
}
console.log('Starting pause detection timer')
this.clearPauseTimer()
this.pauseTimer = setTimeout(() => {
console.log('Pause timer fired - checking conditions:', {
isRecording: this.isRecording,
conversationComplete: this.conversationComplete,
messagesLength: this.messages.length,
suggestionsEnabled: this.suggestionsEnabled,
showSuggestionPopup: this.showSuggestionPopup
})
// Show suggestions if user is paused (either not recording or recording but silent)
if (!this.conversationComplete && this.messages.length > 0 && !this.showSuggestionPopup) {
console.log('Conditions met, showing suggestions')
this.showSuggestions()
} else {
console.log('Conditions not met, not showing suggestions')
}
}, 15000) // 15 seconds pause detection (increased to prevent interrupting speech)
},
clearPauseTimer() {
if (this.pauseTimer) {
clearTimeout(this.pauseTimer)
this.pauseTimer = null
}
},
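// Request scenario-aware phrase suggestions based on the last few messages, unless the user spoke within the last 5 seconds.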
async showSuggestions() {
if (!this.suggestionsEnabled || this.showSuggestionPopup) return
// Check if user was speaking recently (within last 5 seconds)
const timeSinceLastVoice = this.lastVoiceActivityTime ? Date.now() - this.lastVoiceActivityTime : Infinity
if (timeSinceLastVoice < 5000) {
console.log('Recent voice activity detected, delaying suggestions')
return
}
try {
const requestData = {
language: 'indonesian',
scenario: this.scenario,
conversation_history: this.messages.slice(-4).map(msg => ({
type: msg.type,
text: msg.text
}))
}
console.log('Sending suggestions request:', requestData)
const response = await fetch(`${this.wsBaseUrl.replace('ws', 'http')}/api/suggestions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestData)
})
console.log('Response status:', response.status)
if (response.ok) {
const data = await response.json()
console.log('Suggestions response:', data)
this.suggestionContent = data
this.showSuggestionPopup = true
// Refresh the recording session when suggestions appear to prevent audio corruption
this.$nextTick(() => {
if (this.isRecording) {
console.log('Refreshing recording session after suggestions')
this.stopRecording()
setTimeout(() => {
this.startAutoRecording()
}, 500)
}
})
} else {
console.error('Suggestions API error:', response.status, await response.text())
}
} catch (error) {
console.error('Error fetching suggestions:', error)
}
},
closeSuggestionPopup() {
console.log('Closing suggestion popup')
this.showSuggestionPopup = false
this.suggestionContent = null
// Restart recording to ensure audio capture works properly
if (this.isRecording) {
console.log('Restarting recording after closing suggestions')
this.stopRecording()
setTimeout(() => {
this.startAutoRecording()
}, 200)
}
},
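// End the session: stop recording, lock the controls, and request end-of-conversation feedback (falling back to a generic message on failure).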
async finishConversation() {
// Stop any ongoing recording
if (this.isRecording) {
this.stopRecording()
}
// Mark conversation as finished
this.isFinished = true
// Close suggestion popup if open
if (this.showSuggestionPopup) {
this.closeSuggestionPopup()
}
try {
// Send conversation data to backend for feedback
const response = await fetch(`${this.wsBaseUrl.replace('ws', 'http')}/api/conversation-feedback`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
language: 'indonesian',
scenario: this.scenario,
conversation_history: this.messages.map(msg => ({
type: msg.type,
text: msg.text,
timestamp: msg.timestamp
}))
})
})
if (response.ok) {
const feedback = await response.json()
this.conversationFeedback = feedback
// Scroll to show feedback
this.$nextTick(() => {
this.scrollToBottom()
})
} else {
console.error('Failed to get conversation feedback:', response.status)
// Show a fallback message
this.conversationFeedback = {
encouragement: "Great job practicing! Every conversation helps you improve.",
suggestions: [],
examples: []
}
}
} catch (error) {
console.error('Error getting conversation feedback:', error)
// Show a fallback message
this.conversationFeedback = {
encouragement: "Great job practicing! Every conversation helps you improve.",
suggestions: [],
examples: []
}
}
},
}
}
</script>
<style scoped>
.speech-interface {
height: 100%;
display: flex;
flex-direction: column;
background: transparent;
padding: 1.5rem; /* Add padding to prevent goal section from touching edges */
}
.conversation-area {
flex: 1;
display: flex;
flex-direction: column;
min-height: 0;
}
.messages {
flex: 1;
overflow-y: auto;
padding: 1.5rem;
background: var(--surface-alt);
margin-bottom: 1rem;
border-radius: var(--radius) var(--radius) 0 0;
min-height: 300px;
scrollbar-width: thin;
scrollbar-color: var(--border) var(--surface-alt);
}
.messages::-webkit-scrollbar {
width: 6px;
}
.messages::-webkit-scrollbar-track {
background: var(--surface-alt);
}
.messages::-webkit-scrollbar-thumb {
background: var(--border);
border-radius: 3px;
}
.messages::-webkit-scrollbar-thumb:hover {
background: var(--text-muted);
}
.message {
margin-bottom: 1rem;
padding: 1rem;
border-radius: var(--radius-lg);
max-width: 75%;
position: relative;
animation: messageSlide 0.3s ease;
}
@keyframes messageSlide {
from {
opacity: 0;
transform: translateY(10px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.message.user {
background: var(--primary);
color: white;
margin-left: auto;
border-bottom-right-radius: 4px;
}
.message.ai {
background: var(--surface);
border: 1px solid var(--border);
margin-right: auto;
border-bottom-left-radius: 4px;
}
.message.system {
background: var(--accent);
color: white;
margin: 0 auto;
text-align: center;
font-weight: 500;
border-radius: var(--radius-lg);
box-shadow: var(--shadow);
}
.message.error {
background: #fef2f2;
color: #dc2626;
border: 1px solid #fecaca;
margin-right: auto;
}
.message-content {
margin-bottom: 0.5rem;
}
.message-content p {
margin: 0;
line-height: 1.5;
}
.audio-player {
margin-top: 0.75rem;
}
.audio-player audio {
width: 100%;
max-width: 250px;
height: 32px;
border-radius: 16px;
}
.timestamp {
font-size: 0.75rem;
color: var(--text-muted);
opacity: 0.7;
font-weight: 400;
}
.message.user .timestamp {
color: rgba(255, 255, 255, 0.8);
}
/* Translation styles */
.translation-section {
margin-top: 0.75rem;
border-top: 1px solid var(--border);
padding-top: 0.75rem;
}
.translation-toggle {
background: var(--surface-alt);
border: 1px solid var(--border);
color: var(--text-light);
padding: 0.5rem 0.75rem;
border-radius: var(--radius);
font-size: 0.75rem;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
display: inline-flex;
align-items: center;
gap: 0.25rem;
}
.translation-toggle:hover {
background: var(--primary);
color: white;
border-color: var(--primary);
transform: translateY(-1px);
}
.translation-toggle:disabled {
opacity: 0.6;
cursor: not-allowed;
transform: none;
}
.translation-toggle.hide {
background: var(--surface);
color: var(--text-muted);
font-size: 0.7rem;
padding: 0.25rem 0.5rem;
}
.translation-toggle.hide:hover {
background: var(--text-muted);
color: white;
}
.translation-content {
margin-top: 0.5rem;
}
.translation-text {
background: var(--surface-alt);
border: 1px solid var(--border);
border-radius: var(--radius);
padding: 0.75rem;
margin: 0 0 0.5rem 0;
font-style: italic;
color: var(--text-light);
font-size: 0.9rem;
line-height: 1.4;
position: relative;
}
.translation-text::before {
content: "🇺🇸";
position: absolute;
top: 0.5rem;
right: 0.75rem;
font-size: 0.8rem;
opacity: 0.6;
}
.transcription-display {
background: var(--surface);
border: 1px solid var(--border);
padding: 1rem;
border-radius: var(--radius);
margin-bottom: 1rem;
border-left: 3px solid var(--secondary);
}
.transcription-text {
font-style: italic;
margin-bottom: 0.5rem;
color: var(--text-light);
}
.transcription-status {
font-size: 0.8rem;
color: var(--text-muted);
font-weight: 500;
}
.recording-stopped-notification {
background: var(--secondary);
color: white;
padding: 1rem;
border-radius: var(--radius);
margin-bottom: 1rem;
text-align: center;
box-shadow: var(--shadow);
animation: slideIn 0.3s ease;
}
.recording-stopped-notification p {
margin: 0;
font-weight: 500;
font-size: 0.9rem;
}
.controls {
display: flex;
gap: 0.75rem;
align-items: center;
padding: 1rem;
background: var(--surface);
border-radius: 0 0 var(--radius) var(--radius);
border-top: 1px solid var(--border);
}
.record-btn {
background: var(--accent);
color: white;
border: none;
padding: 0.75rem 1.25rem;
border-radius: var(--radius-lg);
cursor: pointer;
font-family: 'DM Sans', sans-serif;
font-size: 0.9rem;
font-weight: 500;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
min-width: 100px;
display: flex;
align-items: center;
gap: 0.5rem;
box-shadow: var(--shadow-sm);
}
.record-btn:hover {
background: #059669;
transform: translateY(-1px);
box-shadow: var(--shadow);
}
.record-btn.recording {
background: #dc2626;
animation: recordingPulse 2s ease-in-out infinite;
}
.record-btn:disabled {
background: var(--text-muted);
cursor: not-allowed;
transform: none;
box-shadow: none;
}
@keyframes recordingPulse {
0%, 100% {
transform: scale(1);
box-shadow: 0 0 0 0 rgba(220, 38, 38, 0.4);
}
50% {
transform: scale(1.02);
box-shadow: 0 0 0 8px rgba(220, 38, 38, 0);
}
}
.text-input {
flex: 1;
padding: 0.75rem 1rem;
border: 1px solid var(--border);
border-radius: var(--radius-lg);
font-family: 'DM Sans', sans-serif;
font-size: 0.9rem;
background: var(--surface);
color: var(--text);
transition: all 0.2s ease;
}
.text-input:focus {
outline: none;
border-color: var(--primary);
box-shadow: 0 0 0 3px rgba(37, 99, 235, 0.1);
}
.text-input::placeholder {
color: var(--text-muted);
}
.send-btn {
background: var(--primary);
color: white;
border: none;
padding: 0.75rem 1.25rem;
border-radius: var(--radius-lg);
cursor: pointer;
font-family: 'DM Sans', sans-serif;
font-size: 0.9rem;
font-weight: 500;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
box-shadow: var(--shadow-sm);
}
.send-btn:hover {
background: var(--primary-dark);
transform: translateY(-1px);
box-shadow: var(--shadow);
}
.send-btn:disabled {
background: var(--text-muted);
cursor: not-allowed;
transform: none;
box-shadow: none;
}
.reset-btn {
background: var(--surface);
color: var(--text-light);
border: 1px solid var(--border);
padding: 0.75rem 1rem;
border-radius: var(--radius-lg);
cursor: pointer;
font-family: 'DM Sans', sans-serif;
font-size: 0.9rem;
font-weight: 500;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
}
.reset-btn:hover {
background: var(--surface-alt);
border-color: var(--text-muted);
transform: translateY(-1px);
}
.finish-btn {
background: var(--accent);
color: white;
border: none;
padding: 0.75rem 1.25rem;
border-radius: var(--radius-lg);
cursor: pointer;
font-family: 'DM Sans', sans-serif;
font-size: 0.9rem;
font-weight: 500;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
box-shadow: var(--shadow-sm);
}
.finish-btn:hover:not(:disabled) {
background: #047857;
transform: translateY(-1px);
box-shadow: var(--shadow);
}
.finish-btn:disabled {
background: var(--text-muted);
cursor: not-allowed;
transform: none;
box-shadow: none;
}
.force-stop-btn {
background: #dc2626;
color: white;
border: none;
padding: 0.75rem;
border-radius: var(--radius-lg);
cursor: pointer;
font-size: 1rem;
transition: all 0.2s ease;
margin-left: 0.5rem;
width: 40px;
height: 40px;
display: flex;
align-items: center;
justify-content: center;
}
.force-stop-btn:hover {
background: #b91c1c;
transform: translateY(-1px);
}
.status {
padding: 0.75rem;
text-align: center;
}
.status-indicator {
display: inline-flex;
align-items: center;
gap: 0.5rem;
padding: 0.5rem 1rem;
border-radius: var(--radius-lg);
font-size: 0.85rem;
font-weight: 500;
}
.status-indicator.connected {
background: #f0fdf4;
color: var(--accent);
border: 1px solid #bbf7d0;
}
.status-indicator.connecting {
background: #fffbeb;
color: var(--secondary);
border: 1px solid #fed7aa;
}
.status-indicator.disconnected,
.status-indicator.error {
background: #fef2f2;
color: #dc2626;
border: 1px solid #fecaca;
}
/* Goal Progress Styles */
.goal-section {
background: var(--surface);
border: 1px solid var(--border);
border-radius: var(--radius-lg);
padding: 1.5rem;
margin-bottom: 1.5rem;
box-shadow: var(--shadow-sm);
position: relative;
overflow: hidden;
}
.goal-section::before {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
height: 3px;
background: var(--primary);
}
.goal-title {
margin: 0 0 1rem 0;
color: var(--text);
font-size: 1.1rem;
font-weight: 600;
display: flex;
align-items: center;
gap: 0.5rem;
}
.goal-checklist {
margin-bottom: 1rem;
}
.goal-item {
display: flex;
align-items: center;
padding: 0.75rem 0;
transition: all 0.2s ease;
border-radius: var(--radius);
}
.goal-item.completed {
opacity: 0.7;
}
.goal-checkbox {
margin-right: 0.75rem;
font-size: 1.1rem;
transition: transform 0.2s ease;
}
.goal-item.completed .goal-checkbox {
transform: scale(1.1);
}
.goal-description {
color: var(--text-light);
font-weight: 500;
font-size: 0.9rem;
}
.goal-item.completed .goal-description {
text-decoration: line-through;
color: var(--text-muted);
}
.completion-banner {
background: var(--accent);
color: white;
padding: 1.25rem;
border-radius: var(--radius-lg);
text-align: center;
font-weight: 600;
font-size: 1rem;
box-shadow: var(--shadow);
animation: celebrationPulse 3s ease-in-out;
}
@keyframes celebrationPulse {
0%, 100% {
transform: scale(1);
box-shadow: var(--shadow);
}
50% {
transform: scale(1.02);
box-shadow: var(--shadow-lg);
}
}
@media (max-width: 768px) {
.controls {
gap: 0.5rem;
padding: 0.75rem;
flex-wrap: wrap;
}
.suggestion-toggle-container {
margin-left: 0;
margin-top: 0.5rem;
}
.suggestion-panel {
margin-bottom: 0.75rem;
}
.suggestion-panel-content {
padding: 0.75rem;
}
.suggestion-panel-item {
padding: 0.5rem;
}
.record-btn,
.send-btn {
padding: 0.625rem 1rem;
font-size: 0.85rem;
min-width: 80px;
}
.text-input {
min-width: 0;
flex: 1 1 100%;
order: -1;
margin-bottom: 0.5rem;
}
.speech-interface {
padding: 1rem; /* Reduce padding on mobile */
}
.goal-section {
padding: 1rem;
}
.messages {
padding: 1rem;
min-height: 250px;
}
}
/* Suggestion Toggle Styles */
.suggestion-toggle-container {
display: flex;
align-items: center;
gap: 0.5rem;
margin-left: 0.5rem;
}
.suggestion-toggle-label {
font-size: 0.8rem;
color: var(--text-light);
font-weight: 500;
cursor: pointer;
}
.suggestion-toggle {
width: 16px;
height: 16px;
cursor: pointer;
accent-color: var(--primary);
}
/* Inline Suggestion Panel Styles */
.suggestion-panel {
background: var(--surface);
border: 1px solid var(--primary);
border-radius: var(--radius);
margin-bottom: 1rem;
box-shadow: var(--shadow-sm);
animation: slideIn 0.3s ease;
}
@keyframes slideIn {
from {
opacity: 0;
transform: translateY(-10px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.suggestion-panel-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 0.75rem 1rem;
border-bottom: 1px solid var(--border);
background: var(--surface-alt);
}
.suggestion-panel-header h4 {
margin: 0;
color: var(--text);
font-size: 0.95rem;
font-weight: 600;
}
.suggestion-close-btn {
background: none;
border: none;
font-size: 1rem;
cursor: pointer;
color: var(--text-muted);
padding: 0.25rem;
border-radius: var(--radius);
transition: all 0.2s ease;
width: 24px;
height: 24px;
display: flex;
align-items: center;
justify-content: center;
}
.suggestion-close-btn:hover {
background: var(--surface);
color: var(--text);
}
.suggestion-panel-content {
padding: 1rem;
}
.suggestion-panel-intro {
color: var(--text-light);
font-size: 0.85rem;
margin-bottom: 0.75rem;
line-height: 1.4;
}
.suggestion-panel-list {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.suggestion-panel-item {
background: var(--surface-alt);
border: 1px solid var(--border);
border-radius: var(--radius);
padding: 0.75rem;
transition: all 0.2s ease;
}
.suggestion-panel-text {
margin-bottom: 0.25rem;
font-size: 0.9rem;
font-weight: 500;
color: var(--text);
}
.suggestion-panel-translation {
font-size: 0.8rem;
opacity: 0.7;
font-style: italic;
color: var(--text-muted);
}
/* Feedback Section Styles */
.feedback-section {
background: var(--surface);
border: 1px solid var(--accent);
border-radius: var(--radius-lg);
padding: 1.5rem;
margin-bottom: 1.5rem;
box-shadow: var(--shadow-sm);
position: relative;
overflow: hidden;
}
.feedback-section::before {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
height: 3px;
background: var(--accent);
}
.feedback-title {
margin: 0 0 1rem 0;
color: var(--text);
font-size: 1.2rem;
font-weight: 600;
text-align: center;
}
.feedback-content {
margin-bottom: 1rem;
}
.feedback-encouragement {
font-size: 1rem;
color: var(--text-light);
text-align: center;
margin-bottom: 1.5rem;
font-style: italic;
}
.feedback-suggestions h4,
.feedback-examples h4 {
color: var(--text);
font-size: 1rem;
font-weight: 600;
margin: 1.5rem 0 1rem 0;
}
.feedback-suggestion {
background: var(--surface-alt);
border-radius: var(--radius);
padding: 1rem;
margin-bottom: 0.75rem;
border-left: 3px solid var(--primary);
}
.suggestion-title {
font-weight: 600;
color: var(--text);
margin-bottom: 0.5rem;
}
.suggestion-text {
color: var(--text-light);
line-height: 1.5;
}
.feedback-example {
background: var(--surface-alt);
border-radius: var(--radius);
padding: 1rem;
margin-bottom: 0.75rem;
border-left: 3px solid var(--secondary);
}
.example-original {
margin-bottom: 0.5rem;
color: var(--text-light);
}
.example-improved {
margin-bottom: 0.5rem;
color: var(--text);
}
.example-reason {
color: var(--text-muted);
font-size: 0.9rem;
}
.feedback-encouragement-final {
text-align: center;
margin-top: 1.5rem;
padding: 1rem;
background: var(--accent);
color: white;
border-radius: var(--radius);
font-weight: 500;
}
</style>