street-lingo/apps/shared/components/BaseSpeechInterface.vue

<template>
<div class="speech-interface">
<!-- Tutorial Popup -->
<div v-if="showTutorial" class="tutorial-overlay" @click="closeTutorial">
<div class="tutorial-popup" @click.stop>
<div class="tutorial-header">
<h3>Welcome to Street Lingo! 🎉</h3>
<button @click="closeTutorial" class="tutorial-close">✕</button>
</div>
<div class="tutorial-content">
<div class="tutorial-step">
<div class="tutorial-icon">🎙️</div>
<div class="tutorial-text">
<strong>Practice Speaking</strong>
<p>Click "Start Conversation" to begin chatting with AI characters in real {{ config.name }} scenarios</p>
</div>
</div>
<div class="tutorial-step">
<div class="tutorial-icon">💡</div>
<div class="tutorial-text">
<strong>Get Help</strong>
<p>Enable the "Help" toggle and pause during conversation to get helpful phrase suggestions</p>
</div>
</div>
<div class="tutorial-step">
<div class="tutorial-icon">📊</div>
<div class="tutorial-text">
<strong>Conversation Analysis</strong>
<p>Click "Finish Conversation" to get personalized feedback on your {{ config.name }}</p>
</div>
</div>
</div>
<div class="tutorial-footer">
<button @click="closeTutorial" class="tutorial-got-it">Got it!</button>
</div>
</div>
</div>
<div class="conversation-area">
<div class="messages" ref="messagesContainer">
<div
v-for="message in messages"
:key="message.id"
:class="['message', message.type]"
>
<div class="message-content">
<p>{{ message.text }}</p>
<div v-if="message.type === 'ai'" class="translation-section">
<button
v-if="!message.showTranslation"
@click="requestTranslation(message)"
:disabled="message.loadingTranslation"
class="translation-toggle"
>
{{ message.loadingTranslation ? 'Translating...' : '🌍 Show English' }}
</button>
<div v-if="message.showTranslation" class="translation-content">
<p class="translation-text">{{ message.englishTranslation }}</p>
<button @click="hideTranslation(message)" class="translation-toggle hide">
✕ Hide English
</button>
</div>
</div>
<div v-if="message.audio" class="audio-player">
<audio :src="message.audio" controls autoplay></audio>
</div>
</div>
<span class="timestamp">{{ formatTime(message.timestamp) }}</span>
</div>
</div>
<div class="transcription-display" v-if="currentTranscription">
<p class="transcription-text">{{ currentTranscription }}</p>
<span class="transcription-status">{{ getTranscriptionStatus() }}</span>
</div>
<!-- Recording Stopped Notification -->
<div v-if="showRecordingStoppedNotification" class="recording-stopped-notification">
<p>🛑 Recording stopped. Click "Speak" to continue.</p>
</div>
<!-- Inline Suggestion Panel -->
<div v-if="showSuggestionPopup" class="suggestion-panel">
<div class="suggestion-panel-header">
<h4>💡 Help Suggestions</h4>
<button @click="closeSuggestionPopup" class="suggestion-close-btn">✕</button>
</div>
<div class="suggestion-panel-content" v-if="suggestionContent">
<p class="suggestion-panel-intro">{{ suggestionContent.intro }}</p>
<div class="suggestion-panel-list">
<div
v-for="(suggestion, index) in suggestionContent.suggestions"
:key="index"
class="suggestion-panel-item"
>
<div class="suggestion-panel-text">
<strong>{{ suggestion[config.phrases.nativeField] }}</strong>
</div>
<div class="suggestion-panel-translation">
{{ suggestion[config.phrases.translationField] }}
</div>
</div>
</div>
</div>
</div>
</div>
<div class="controls">
<!-- Start Conversation Button -->
<button
v-if="showStartButton"
@click="startConversation"
class="start-conversation-btn"
:disabled="isConnecting"
>
🎙️ Start Conversation
</button>
<button
v-else
@click="toggleRecording"
:class="['record-btn', { recording: isRecording }]"
:disabled="isConnecting || isFinished"
>
{{ isRecording ? '🛑 Stop' : '🎤 Speak' }}
</button>
<div class="suggestion-toggle-container">
<label class="suggestion-toggle-label" for="suggestion-toggle">
💡 Help
</label>
<input
id="suggestion-toggle"
type="checkbox"
v-model="suggestionsEnabled"
class="suggestion-toggle"
/>
</div>
<input
v-model="textInput"
@keyup.enter="sendTextMessage"
:placeholder="`Type in ${config.name}...`"
class="text-input"
:disabled="isRecording || isFinished"
/>
<button
@click="sendTextMessage"
:disabled="!textInput.trim() || isRecording || isFinished"
class="send-btn"
>
Send
</button>
<button
@click="resetConversation"
class="reset-btn"
title="Start new conversation"
>
🔄 Reset
</button>
<button
@click="finishConversation"
class="finish-btn"
title="End conversation and get assessment"
:disabled="messages.length === 0 || isFinished || isLoadingFeedback"
>
{{ isLoadingFeedback ? 'Analyzing conversation...' : '✓ Finish Conversation' }}
</button>
</div>
<div class="goal-section" v-if="goalStatus">
<h3 class="goal-title">
🎯 Goal: {{ goalStatus.scenario_goal }}
</h3>
<div class="goal-checklist">
<div
v-for="item in goalStatus.goal_items"
:key="item.id"
:class="['goal-item', { completed: item.completed }]"
>
<span class="goal-checkbox">
{{ item.completed ? '✅' : '⏳' }}
</span>
<span class="goal-description">{{ item.description }}</span>
</div>
</div>
<div v-if="conversationComplete" class="completion-banner">
🎉 All goals completed! Well done!
</div>
</div>
<!-- Feedback Loading Notification -->
<div class="feedback-loading" v-if="isLoadingFeedback">
<div class="loading-content">
<div class="loading-spinner"></div>
<p>Analyzing your conversation...</p>
</div>
</div>
<!-- Conversation Feedback -->
<div class="feedback-section" v-if="conversationFeedback">
<h3 class="feedback-title">
🎉 Great conversation! Here's some helpful feedback:
</h3>
<div class="feedback-content">
<div class="feedback-summary">
<p class="feedback-encouragement">{{ conversationFeedback.encouragement }}</p>
</div>
<div class="feedback-suggestions" v-if="conversationFeedback.suggestions && conversationFeedback.suggestions.length > 0">
<h4>💡 Tips to sound even more natural:</h4>
<div
v-for="(suggestion, index) in conversationFeedback.suggestions"
:key="index"
class="feedback-suggestion"
>
<div class="suggestion-title">{{ suggestion.category }}</div>
<div class="suggestion-text">{{ suggestion.tip }}</div>
</div>
</div>
<div class="feedback-examples" v-if="conversationFeedback.examples && conversationFeedback.examples.length > 0">
<h4>🗣️ Alternative ways to express yourself:</h4>
<div
v-for="(example, index) in conversationFeedback.examples"
:key="index"
class="feedback-example"
>
<div class="example-original">
<strong>You said:</strong> "{{ example.original }}"
</div>
<div class="example-improved">
<strong>You could also say:</strong> "{{ example.improved }}"
</div>
<div class="example-reason" v-if="example.reason">
<em>{{ example.reason }}</em>
</div>
</div>
</div>
<div class="feedback-encouragement-final">
<p>Keep practicing - you're doing great! 🌟</p>
</div>
</div>
</div>
<div class="status" v-if="connectionStatus">
<span :class="['status-indicator', connectionStatus]">
{{ getStatusMessage() }}
</span>
</div>
</div>
</template>
<script>
export default {
name: 'BaseSpeechInterface',
inject: ['updateConversationProgress'],
props: {
scenario: {
type: String,
default: ''
},
config: {
type: Object,
required: true
}
},
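// Shape of the `config` prop this component reads (illustrative example;
// the field names below are the ones actually used, the values are hypothetical):
// {
//   name: 'German',                          // display name used in UI copy
//   code: 'de',                              // language code sent to the suggestions/feedback APIs
//   wsEndpoint: '/ws/german',                // WebSocket path appended to wsBaseUrl
//   apiEndpoint: '/api/scenarios/german',    // scenario data endpoint
//   phrases: { nativeField: 'german', translationField: 'english' },
//   translation: { sourceLanguage: 'de', targetLanguage: 'en' }
// }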
data() {
return {
messages: [],
currentTranscription: '',
isTranscriptionFinal: false,
isRecording: false,
isConnecting: false,
textInput: '',
connectionStatus: 'disconnected',
websocket: null,
mediaRecorder: null,
audioStream: null,
audioContext: null,
messageId: 0,
wsBaseUrl: import.meta.env.VITE_WS_BASE_URL || 'ws://localhost:8000',
goalStatus: null,
conversationComplete: false,
scenarioData: null,
isAutoListening: false,
suggestionsEnabled: false,
showSuggestionPopup: false,
suggestionContent: null,
pauseTimer: null,
lastInteractionTime: null,
silenceTimer: null,
lastVoiceActivityTime: null,
showRecordingStoppedNotification: false,
isAICurrentlySpeaking: false,
lastAIResponseTime: null,
isFinished: false,
conversationFeedback: null,
isLoadingFeedback: false,
hasRequestedInitialGreeting: false,
showStartButton: true,
showTutorial: false
}
},
mounted() {
this.connectWebSocket()
this.loadScenarioData()
this.checkAndShowTutorial()
},
beforeUnmount() {
this.disconnect()
this.clearPauseTimer()
},
watch: {
scenario: {
handler(newScenario, oldScenario) {
if (oldScenario && newScenario !== oldScenario) {
this.resetConversationOnScenarioChange()
} else if (newScenario && !oldScenario) {
console.log('Scenario initially set to:', newScenario)
}
}
},
messages: {
handler(newMessages) {
this.updateConversationProgress(newMessages.length > 0)
},
deep: true
}
},
methods: {
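// Open the streaming WebSocket for this language and wire up lifecycle
// handlers; connection state drives the status indicator in the template.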
connectWebSocket() {
this.isConnecting = true
this.connectionStatus = 'connecting'
try {
this.websocket = new WebSocket(`${this.wsBaseUrl}${this.config.wsEndpoint}`)
this.websocket.onopen = () => {
this.connectionStatus = 'connected'
this.isConnecting = false
console.log(`${this.config.name} WebSocket connected`)
}
this.websocket.onmessage = (event) => {
const data = JSON.parse(event.data)
this.handleWebSocketMessage(data)
}
this.websocket.onclose = () => {
this.connectionStatus = 'disconnected'
this.isConnecting = false
console.log(`${this.config.name} WebSocket disconnected`)
}
this.websocket.onerror = (error) => {
this.connectionStatus = 'error'
this.isConnecting = false
console.error(`${this.config.name} WebSocket error:`, error)
}
} catch (error) {
this.connectionStatus = 'error'
this.isConnecting = false
console.error('Failed to connect:', error)
}
},
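// Dispatch incoming WebSocket messages by `type`. The handler expects:
//   transcription      -> { transcript, is_final }
//   ai_response        -> { text, audio?, goal_status?, conversation_complete? }
//   recording_timeout  -> (no payload used)
//   error              -> { message }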
handleWebSocketMessage(data) {
if (data.type === 'transcription') {
this.currentTranscription = data.transcript
this.isTranscriptionFinal = data.is_final
if (data.is_final) {
if (data.transcript.trim()) {
this.addMessage('user', data.transcript)
this.lastInteractionTime = Date.now()
this.clearPauseTimer()
this.startPauseDetection()
}
this.currentTranscription = ''
}
} else if (data.type === 'ai_response') {
this.addMessage('ai', data.text, data.audio)
if (data.goal_status) {
this.goalStatus = data.goal_status
}
if (data.conversation_complete) {
this.conversationComplete = true
this.showConversationComplete()
}
this.lastInteractionTime = Date.now()
this.isAICurrentlySpeaking = true
this.lastAIResponseTime = Date.now()
this.startPauseDetection()
} else if (data.type === 'recording_timeout') {
const timeSinceAIResponse = this.lastAIResponseTime ? Date.now() - this.lastAIResponseTime : Infinity
const timeSinceLastVoice = this.lastVoiceActivityTime ? Date.now() - this.lastVoiceActivityTime : Infinity
if (!this.isAICurrentlySpeaking &&
timeSinceAIResponse > 15000 &&
timeSinceLastVoice < 30000 &&
timeSinceLastVoice > 5000) {
this.showRecordingStoppedNotification = true
setTimeout(() => {
this.showRecordingStoppedNotification = false
}, 5000)
}
} else if (data.type === 'error') {
this.addMessage('error', data.message)
}
},
async toggleRecording() {
if (this.isRecording) {
this.stopRecording()
} else {
await this.startRecording()
}
},
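// Capture the microphone, stream 250 ms MediaRecorder chunks to the server,
// and announce the start of an utterance with an `audio_start` message.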
async startRecording() {
if (this.connectionStatus !== 'connected') {
alert('Please wait for connection to establish')
return
}
try {
this.audioStream = await navigator.mediaDevices.getUserMedia({
audio: {
sampleRate: 48000,
channelCount: 1,
echoCancellation: true,
noiseSuppression: true
}
})
if (!this.audioContext) {
this.audioContext = new (window.AudioContext || window.webkitAudioContext)()
}
const mimeType = 'audio/webm;codecs=opus'
// Fall back to the browser default when Opus/WebM is unsupported (e.g. Safari)
this.mediaRecorder = MediaRecorder.isTypeSupported(mimeType)
? new MediaRecorder(this.audioStream, { mimeType })
: new MediaRecorder(this.audioStream)
this.mediaRecorder.ondataavailable = (event) => {
if (event.data.size > 0) {
this.sendAudioChunk(event.data)
}
}
this.mediaRecorder.start(250)
this.isRecording = true
this.currentTranscription = ''
this.isTranscriptionFinal = false
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const startMessage = {
type: 'audio_start',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(startMessage))
}
} catch (error) {
console.error('Error starting recording:', error)
alert('Error accessing microphone: ' + error.message)
}
},
stopRecording() {
if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
this.mediaRecorder.stop()
}
if (this.audioStream) {
this.audioStream.getTracks().forEach(track => track.stop())
}
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const endMessage = {
type: 'audio_end',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(endMessage))
}
this.isRecording = false
this.isAutoListening = false
this.currentTranscription = 'Processing...'
this.isTranscriptionFinal = false
},
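// Base64-encode a recorded chunk and forward it over the WebSocket as an
// `audio_chunk` message tagged with the current scenario.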
async sendAudioChunk(audioBlob) {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const arrayBuffer = await audioBlob.arrayBuffer()
// Convert in slices so large chunks cannot overflow the argument limit
// of String.fromCharCode(...)
const bytes = new Uint8Array(arrayBuffer)
let binary = ''
for (let i = 0; i < bytes.length; i += 0x8000) {
binary += String.fromCharCode(...bytes.subarray(i, i + 0x8000))
}
const base64Audio = btoa(binary)
const message = {
type: 'audio_chunk',
audio: base64Audio,
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(message))
}
},
sendTextMessage() {
if (!this.textInput.trim() || this.connectionStatus !== 'connected') return
const message = {
type: 'text_message',
text: this.textInput,
scenario_context: this.scenario
}
this.addMessage('user', this.textInput)
this.websocket.send(JSON.stringify(message))
this.textInput = ''
this.lastInteractionTime = Date.now()
this.clearPauseTimer()
this.startPauseDetection()
},
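// Append a chat message; AI audio is wrapped in a data URL so the inline
// <audio> element can play it, and auto-recording is armed once playback ends.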
addMessage(type, text, audio = null) {
const message = {
id: this.messageId++,
type,
text,
audio: audio ? `data:audio/wav;base64,${audio}` : null,
timestamp: new Date(),
showTranslation: false,
loadingTranslation: false,
englishTranslation: null
}
this.messages.push(message)
this.$nextTick(() => {
this.scrollToBottom()
if (type === 'ai' && audio) {
this.setupAutoRecording(message)
}
})
},
scrollToBottom() {
const container = this.$refs.messagesContainer
if (container) {
container.scrollTop = container.scrollHeight
}
},
formatTime(timestamp) {
return timestamp.toLocaleTimeString('en-US', {
hour12: false,
hour: '2-digit',
minute: '2-digit'
})
},
getStatusMessage() {
const statusMessages = {
connecting: 'Connecting...',
connected: 'Connected',
disconnected: 'Disconnected',
error: 'Connection Error'
}
return statusMessages[this.connectionStatus] || 'Unknown'
},
resetConversation() {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const resetMessage = {
type: 'conversation_reset'
}
this.websocket.send(JSON.stringify(resetMessage))
}
this.messages = []
this.currentTranscription = ''
this.isTranscriptionFinal = false
this.isFinished = false
this.conversationFeedback = null
this.resetGoals()
},
disconnect() {
this.stopRecording()
if (this.websocket) {
this.websocket.close()
}
},
async loadScenarioData() {
try {
// Swap only the ws/wss scheme prefix for the matching http/https base URL
const response = await fetch(`${this.wsBaseUrl.replace(/^ws/, 'http')}${this.config.apiEndpoint}`)
const scenarios = await response.json()
this.scenarioData = scenarios[this.scenario]
if (this.scenarioData) {
this.goalStatus = {
scenario_goal: this.scenarioData.goal,
goal_items: this.scenarioData.goal_items.map(item => ({
...item,
completed: false
})),
all_completed: false
}
}
} catch (error) {
console.error('Failed to load scenario data:', error)
}
},
showConversationComplete() {
this.addMessage('system', '🎉 Congratulations! You have completed all goals for this scenario!')
setTimeout(() => {
this.scrollToBottom()
}, 100)
},
resetGoals() {
if (this.scenarioData) {
this.goalStatus = {
scenario_goal: this.scenarioData.goal,
goal_items: this.scenarioData.goal_items.map(item => ({
...item,
completed: false
})),
all_completed: false
}
}
this.conversationComplete = false
this.isFinished = false
this.conversationFeedback = null
},
resetConversationOnScenarioChange() {
this.loadScenarioData()
this.messages = []
this.currentTranscription = ''
this.isTranscriptionFinal = false
this.conversationComplete = false
this.isFinished = false
this.conversationFeedback = null
this.hasRequestedInitialGreeting = false
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
const resetMessage = {
type: 'conversation_reset'
}
this.websocket.send(JSON.stringify(resetMessage))
}
this.showStartButton = true
},
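// Fetch an English translation for an AI message on demand and toggle the
// inline translation panel; failures surface as an inline error message.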
async requestTranslation(message) {
message.loadingTranslation = true
try {
const response = await fetch('/api/translate', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
text: message.text,
source_language: this.config.translation.sourceLanguage,
target_language: this.config.translation.targetLanguage
})
})
if (!response.ok) {
throw new Error('Translation failed')
}
const data = await response.json()
message.englishTranslation = data.translation
message.showTranslation = true
} catch (error) {
console.error('Translation error:', error)
message.englishTranslation = 'Translation failed. Please try again.'
message.showTranslation = true
} finally {
message.loadingTranslation = false
}
},
hideTranslation(message) {
message.showTranslation = false
},
requestInitialGreeting() {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN && !this.hasRequestedInitialGreeting) {
setTimeout(() => {
if (this.scenario) {
const greetingMessage = {
type: 'initial_greeting',
scenario_context: this.scenario
}
this.websocket.send(JSON.stringify(greetingMessage))
this.hasRequestedInitialGreeting = true
console.log('Sent initial greeting for scenario:', this.scenario)
} else {
console.log('No scenario set, retrying in 300ms')
setTimeout(() => this.requestInitialGreeting(), 300)
}
}, 200)
}
},
checkAndRequestGreeting() {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN &&
this.scenario && !this.hasRequestedInitialGreeting) {
this.requestInitialGreeting()
}
},
startConversation() {
this.showStartButton = false
this.checkAndRequestGreeting()
},
checkAndShowTutorial() {
const hasSeenTutorial = localStorage.getItem('streetLingo_tutorialSeen')
if (!hasSeenTutorial) {
this.showTutorial = true
}
},
closeTutorial() {
this.showTutorial = false
localStorage.setItem('streetLingo_tutorialSeen', 'true')
},
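// After an AI audio reply finishes playing, automatically reopen the mic
// and, if the Help toggle is on, schedule suggestions after a long pause.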
setupAutoRecording(message) {
this.$nextTick(() => {
const audioElements = this.$refs.messagesContainer.querySelectorAll('audio')
const latestAudio = audioElements[audioElements.length - 1]
if (latestAudio) {
latestAudio.addEventListener('ended', () => {
setTimeout(() => {
if (!this.isRecording && !this.conversationComplete) {
this.startAutoRecording()
}
this.isAICurrentlySpeaking = false
}, 500)
this.startPauseDetection()
setTimeout(() => {
if (this.suggestionsEnabled && !this.showSuggestionPopup && !this.conversationComplete) {
this.showSuggestions()
}
}, 15000)
})
}
})
},
async startAutoRecording() {
try {
this.isAutoListening = true
await this.startRecording()
this.startVoiceActivityDetection()
} catch (error) {
console.error('Auto-recording failed:', error)
this.isAutoListening = false
}
},
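// Lightweight voice-activity detection: sample the mic through an
// AnalyserNode on each animation frame and stop recording after ~3 s of
// silence once speech has been heard.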
startVoiceActivityDetection() {
let silenceTimer = null
let voiceDetected = false
const silenceThreshold = 3000
if (this.audioStream && this.audioContext) {
const source = this.audioContext.createMediaStreamSource(this.audioStream)
const analyzer = this.audioContext.createAnalyser()
analyzer.fftSize = 256
source.connect(analyzer)
const bufferLength = analyzer.frequencyBinCount
const dataArray = new Uint8Array(bufferLength)
const checkVoiceActivity = () => {
if (!this.isRecording) return
analyzer.getByteFrequencyData(dataArray)
const average = dataArray.reduce((a, b) => a + b) / bufferLength
const volumeThreshold = 15
if (average > volumeThreshold) {
voiceDetected = true
this.lastVoiceActivityTime = Date.now()
this.clearPauseTimer()
if (silenceTimer) {
clearTimeout(silenceTimer)
silenceTimer = null
}
} else if (voiceDetected) {
if (!silenceTimer) {
silenceTimer = setTimeout(() => {
if (this.isRecording) {
this.stopRecording()
if (!this.isAICurrentlySpeaking && voiceDetected && this.lastVoiceActivityTime) {
const timeSinceLastVoice = Date.now() - this.lastVoiceActivityTime
const timeSinceAIResponse = this.lastAIResponseTime ? Date.now() - this.lastAIResponseTime : Infinity
if (timeSinceLastVoice < 10000 && timeSinceAIResponse > 5000) {
this.showRecordingStoppedNotification = true
setTimeout(() => {
this.showRecordingStoppedNotification = false
}, 5000)
}
}
}
}, silenceThreshold)
}
}
if (this.isRecording) {
requestAnimationFrame(checkVoiceActivity)
}
}
checkVoiceActivity()
}
},
getTranscriptionStatus() {
if (this.isTranscriptionFinal) {
return 'Final'
} else if (this.isAutoListening) {
return 'Auto-listening...'
} else {
return 'Listening...'
}
},
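// When the Help toggle is on, show phrase suggestions after 20 s without
// user interaction; the timer is cleared whenever the user speaks or types.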
startPauseDetection() {
if (!this.suggestionsEnabled) {
return
}
this.clearPauseTimer()
this.pauseTimer = setTimeout(() => {
if (!this.conversationComplete && this.messages.length > 0 && !this.showSuggestionPopup) {
this.showSuggestions()
}
}, 20000)
},
clearPauseTimer() {
if (this.pauseTimer) {
clearTimeout(this.pauseTimer)
this.pauseTimer = null
}
},
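// Ask the backend for contextual phrase suggestions based on the last few
// messages; skipped if the user spoke within the last 5 s.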
async showSuggestions() {
if (!this.suggestionsEnabled || this.showSuggestionPopup) return
const timeSinceLastVoice = this.lastVoiceActivityTime ? Date.now() - this.lastVoiceActivityTime : Infinity
if (timeSinceLastVoice < 5000) {
return
}
try {
const requestData = {
language: this.config.code === 'de' ? 'german' : 'indonesian',
scenario: this.scenario,
conversation_history: this.messages.slice(-4).map(msg => ({
type: msg.type,
text: msg.text
}))
}
const response = await fetch(`${this.wsBaseUrl.replace(/^ws/, 'http')}/api/suggestions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestData)
})
if (response.ok) {
const data = await response.json()
this.suggestionContent = data
this.showSuggestionPopup = true
this.$nextTick(() => {
if (this.isRecording) {
this.stopRecording()
setTimeout(() => {
this.startAutoRecording()
}, 500)
}
})
}
} catch (error) {
console.error('Error fetching suggestions:', error)
}
},
closeSuggestionPopup() {
this.showSuggestionPopup = false
this.suggestionContent = null
if (this.isRecording) {
this.stopRecording()
setTimeout(() => {
this.startAutoRecording()
}, 200)
}
},
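// End the session and request conversation feedback from the backend; falls
// back to a generic encouragement message if the request fails.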
async finishConversation() {
if (this.isRecording) {
this.stopRecording()
}
this.isFinished = true
this.isLoadingFeedback = true
if (this.showSuggestionPopup) {
this.closeSuggestionPopup()
}
try {
const response = await fetch(`${this.wsBaseUrl.replace(/^ws/, 'http')}/api/conversation-feedback`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
language: this.config.code === 'de' ? 'german' : 'indonesian',
scenario: this.scenario,
conversation_history: this.messages.map(msg => ({
type: msg.type,
text: msg.text,
timestamp: msg.timestamp
}))
})
})
if (response.ok) {
const feedback = await response.json()
this.conversationFeedback = feedback
this.isLoadingFeedback = false
this.$nextTick(() => {
this.scrollToBottom()
})
} else {
this.conversationFeedback = {
encouragement: "Great job practicing! Every conversation helps you improve.",
suggestions: [],
examples: []
}
this.isLoadingFeedback = false
}
} catch (error) {
console.error('Error getting conversation feedback:', error)
this.conversationFeedback = {
encouragement: "Great job practicing! Every conversation helps you improve.",
suggestions: [],
examples: []
}
this.isLoadingFeedback = false
}
}
}
}
</script>
<style scoped>
@import '../styles/speech-interface.css';
</style>