Update script2.js
script2.js · CHANGED · +582 −1
@@ -1 +1,582 @@
const startStopButton = document.getElementById('startStopButton');
const voiceSelectionDropdown = document.getElementById('voiceSelect');
const modelSelectionDropdown = document.getElementById('modelSelect');
const noiseSuppressionCheckbox = document.getElementById('noiseSuppression');
const responseTimeDisplay = document.getElementById('responseTime');
const userActivityIndicator = document.getElementById('userIndicator');
const aiActivityIndicator = document.getElementById('aiIndicator');
const transcriptDiv = document.getElementById('transcript');

let speechRecognizer;
let activeQuery = null;
let queryStartTime = 0;
let completeTranscript = '';
let isRequestInProgress = false;
let isUserSpeaking = false;
let isSpeechRecognitionActive = false;
let requestAbortController = null;
let partialTranscript = '';
let lastUserSpeechTimestamp = null;
let prefetchTextQuery = "";
let firstResponseTextTimestamp = null;

// Configuration
const USER_SPEECH_INTERRUPT_DELAY = 500;
const TEXT_TO_SPEECH_API_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech";
const CHUNK_SIZE = 300;

// Audio Management
let currentAudio = null;
let audioPlaybackQueue = [];
let prefetchQueue = [];

// Enhanced Prefetching and Caching
const prefetchCache = new Map();
const pendingPrefetchRequests = new Map();
const MAX_PREFETCH_REQUESTS = 10;
const prefetchCacheExpiration = 60000; // 1 minute

// Global Conversation History
let conversationHistory = [];

// Audio Caching
const audioCache = new Map();
const audioCacheExpiration = 3600000; // 1 hour

// Normalize query text for stable cache keys
const normalizeQueryText = query => query.trim().toLowerCase().replace(/[^\w\s]/g, '');

// Generate a cache key
const generateCacheKey = (normalizedQuery, voice, history, modelName) =>
  `${normalizedQuery}-${voice}-${JSON.stringify(history)}-${modelName}`;
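
// Note: because the serialized history is part of the key, prefetched
// entries are effectively scoped to a single conversational turn; appending
// any new message changes every subsequent key. That appears to be the
// intended invalidation strategy here rather than an oversight.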

// Prefetch and cache the first TTS audio chunk for a likely query
const prefetchFirstAudioChunk = (query, voice) => {
  const normalizedQuery = normalizeQueryText(query);
  const cacheKey = generateCacheKey(normalizedQuery, voice, conversationHistory, modelSelectionDropdown.value);

  if (pendingPrefetchRequests.has(cacheKey) || prefetchCache.has(cacheKey)) return;

  prefetchQueue.push({ query: query.trim(), voice, cacheKey });
  processPrefetchQueue();
};

// Process the prefetch queue, keeping at most MAX_PREFETCH_REQUESTS in flight
const processPrefetchQueue = async () => {
  while (prefetchQueue.length > 0 && pendingPrefetchRequests.size < MAX_PREFETCH_REQUESTS) {
    const { query, voice, cacheKey } = prefetchQueue.shift();
    const abortController = new AbortController();
    pendingPrefetchRequests.set(cacheKey, abortController);

    const url = '/stream_text';
    const requestBody = {
      query: query,
      history: JSON.stringify(conversationHistory),
      model: modelSelectionDropdown.value
    };

    try {
      const response = await fetch(url, {
        method: 'POST',
        headers: {
          'Accept': 'text/event-stream',
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(requestBody),
        signal: abortController.signal
      });

      if (!response.ok) throw new Error('Network response was not ok');

      const firstAudioUrl = await handleStreamingResponseForPrefetch(response.body, voice, abortController.signal);

      if (firstAudioUrl) prefetchCache.set(cacheKey, { url: firstAudioUrl, timestamp: Date.now() });

    } catch (error) {
      if (error.name !== 'AbortError') console.error("Error prefetching audio:", error);
    } finally {
      pendingPrefetchRequests.delete(cacheKey);
      processPrefetchQueue();
    }
  }
};

// Handle the streaming response for prefetching: return a TTS audio URL for
// the first non-empty "data:" line, then stop reading
const handleStreamingResponseForPrefetch = async (responseStream, voice, abortSignal) => {
  const reader = responseStream.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');

      const chunk = decoder.decode(value, { stream: true });
      buffer += chunk;
      const lines = buffer.split('\n');

      for (let i = 0; i < lines.length - 1; i++) {
        const line = lines[i];
        if (line.startsWith('data: ')) {
          const textContent = line.substring(6).trim();
          if (textContent) {
            const audioUrl = await generateTextToSpeechAudio(textContent, voice);
            return audioUrl;
          }
        }
      }

      buffer = lines[lines.length - 1];
    }
  } catch (error) {
    console.error("Error in handleStreamingResponseForPrefetch:", error);
  } finally {
    reader.releaseLock();
  }

  return null;
};
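
// For reference, a sketch of the wire format both stream handlers expect
// from /stream_text (inferred from the parsing above, not from the server
// code): newline-delimited events whose payload lines begin with "data: ",
// for example:
//
//   data: Hello there!
//   data: How can I help you today?
//
// Each non-empty "data:" payload is treated as one text chunk to synthesize.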

// Play audio from the queue, pausing speech recognition while the AI speaks
const playNextAudio = async () => {
  if (audioPlaybackQueue.length > 0) {
    const audioData = audioPlaybackQueue.shift();
    const audio = new Audio(audioData.url);
    updateActivityIndicators();

    // Pause speech recognition if it's active
    if (isSpeechRecognitionActive) {
      speechRecognizer.stop();
      isSpeechRecognitionActive = false;
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Interrupt AI
      `;
    }

    const audioPromise = new Promise(resolve => {
      audio.onended = resolve;
      audio.onerror = resolve;
    });
    if (currentAudio) {
      currentAudio.pause();
      currentAudio.currentTime = 0;
    }

    currentAudio = audio;
    await audio.play();
    await audioPromise;
    playNextAudio();
  } else {
    // Clear the finished audio so later pushes can restart playback (the
    // original left the ended Audio object in place, which made the
    // `if (!currentAudio) playNextAudio()` checks below permanently false
    // once the queue had drained mid-stream).
    currentAudio = null;
    updateActivityIndicators();

    // Resume speech recognition after a short delay
    setTimeout(() => {
      if (!isSpeechRecognitionActive) {
        speechRecognizer.start();
        isSpeechRecognitionActive = true;
        startStopButton.innerHTML = `
          <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
            <path d="M9 9h6v6h-6z"></path>
            <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
            <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
            <line x1="12" y1="19" x2="12" y2="23"></line>
            <line x1="8" y1="23" x2="16" y2="23"></line>
          </svg>
          Stop Listening
        `;
      }
    }, 100);
  }
};

// Generate Text-to-Speech audio with caching
const generateTextToSpeechAudio = async (text, voice) => {
  const normalizedText = normalizeQueryText(text);
  const cacheKey = `${normalizedText}-${voice}`;

  if (audioCache.has(cacheKey)) {
    const cachedData = audioCache.get(cacheKey);
    if (Date.now() - cachedData.timestamp < audioCacheExpiration) {
      return cachedData.url;
    } else {
      audioCache.delete(cacheKey);
    }
  }

  try {
    const response = await fetch(`${TEXT_TO_SPEECH_API_ENDPOINT}?voice=${voice}&text=${encodeURIComponent(text)}`, { method: 'GET' });
    if (!response.ok) throw new Error('Network response was not ok');
    const audioBlob = await response.blob();
    const audioUrl = URL.createObjectURL(audioBlob);

    audioCache.set(cacheKey, { url: audioUrl, timestamp: Date.now() });
    return audioUrl;
  } catch (error) {
    console.error("Error generating TTS audio:", error);
    return null;
  }
};
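
// Example usage (illustrative only; "Brian" is assumed to be a valid voice
// name on the StreamElements endpoint):
//   generateTextToSpeechAudio("Hello world", "Brian")
//     .then(url => { if (url) new Audio(url).play(); });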

// Send a query to the AI
const sendQueryToAI = async (query) => {
  console.log("Sending query to AI:", query);
  isRequestInProgress = true;
  updateActivityIndicators();
  queryStartTime = Date.now();
  firstResponseTextTimestamp = null;

  const normalizedQuery = normalizeQueryText(query);
  // Use the selected voice here so the key matches what the prefetch path
  // stored (the original passed the model name in the voice slot, so cached
  // prefetches could never be found).
  const cacheKey = generateCacheKey(normalizedQuery, voiceSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value);

  if (prefetchCache.has(cacheKey)) {
    const cachedData = prefetchCache.get(cacheKey);
    if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
      const prefetchedAudioUrl = cachedData.url;
      audioPlaybackQueue.push({ url: prefetchedAudioUrl, isPrefetched: true });
      playNextAudio();
    } else {
      prefetchCache.delete(cacheKey);
    }
  }

  requestAbortController = new AbortController();

  const url = '/stream_text';
  const requestBody = {
    query: query,
    history: JSON.stringify(conversationHistory),
    model: modelSelectionDropdown.value
  };

  try {
    const response = await fetch(url, {
      method: 'POST',
      headers: {
        'Accept': 'text/event-stream',
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(requestBody),
      signal: requestAbortController.signal
    });

    if (!response.ok) {
      if (response.status === 429) {
        // Note: this retry is unbounded; repeated 429s will keep recursing.
        console.log("Rate limit hit, retrying in 1 second...");
        await new Promise(resolve => setTimeout(resolve, 1000));
        await sendQueryToAI(query);
        return;
      }
      throw new Error(`Network response was not ok: ${response.status}`);
    }

    console.log("Streaming audio response received");
    await handleStreamingResponse(response.body, voiceSelectionDropdown.value, requestAbortController.signal);
  } catch (error) {
    if (error.name !== 'AbortError') {
      console.error("Error sending query to AI:", error);
    }
  } finally {
    isRequestInProgress = false;
    updateActivityIndicators();
  }
};

// Handle the streaming response: synthesize the first two chunks immediately
// for low latency, then batch the rest into CHUNK_SIZE pieces
const handleStreamingResponse = async (responseStream, voice, abortSignal) => {
  const reader = responseStream.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  let initialChunksSent = 0;
  let fullResponseText = "";
  let textChunk = "";
  let sentText = "";

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');

      if (isUserSpeaking) {
        interruptAudioPlayback('user is speaking');
        break;
      }

      const chunk = decoder.decode(value, { stream: true });
      buffer += chunk;
      const lines = buffer.split('\n');

      for (let i = 0; i < lines.length - 1; i++) {
        const line = lines[i];
        if (line.startsWith('data: ')) {
          const textContent = line.substring(6).trim();
          if (textContent) {
            if (!firstResponseTextTimestamp) firstResponseTextTimestamp = Date.now();

            fullResponseText += textContent + " ";
            textChunk += textContent + " ";
            transcriptDiv.textContent = fullResponseText; // Update the visible transcript

            if (initialChunksSent < 2) {
              const audioUrl = await generateTextToSpeechAudio(textContent, voice);
              if (audioUrl) {
                audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
                if (!currentAudio) playNextAudio();
              }
              sentText += textContent + " ";
              initialChunksSent++;
            } else {
              let unsentTextChunk = textChunk.replace(sentText, '').trim();

              if (unsentTextChunk.length >= CHUNK_SIZE) {
                const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
                if (audioUrl) {
                  audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
                  if (!currentAudio) playNextAudio();
                }
                textChunk = "";
              }
            }
          }
        }
      }

      buffer = lines[lines.length - 1];
    }
  } catch (error) {
    console.error("Error in handleStreamingResponse:", error);
  } finally {
    reader.releaseLock();

    // Flush whatever text was buffered but never synthesized
    let unsentTextChunk = textChunk.replace(sentText, '').trim();
    if (unsentTextChunk !== "") {
      const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
      if (audioUrl) {
        audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
        if (!currentAudio) playNextAudio();
      }
    }

    if (fullResponseText !== '') {
      addToConversationHistory('assistant', fullResponseText);
      fullResponseText = ''; // Clear for the next response
    }
  }
};

// Update activity indicators
const updateActivityIndicators = (state = null) => {
  userActivityIndicator.textContent = isUserSpeaking ? "User: Speaking" : "User: Idle";
  userActivityIndicator.className = isUserSpeaking
    ? "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-blue-400 to-blue-600 hover:bg-gradient-to-r from-blue-500 to-blue-700"
    : "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes

  if (isRequestInProgress && !currentAudio) {
    aiActivityIndicator.textContent = "AI: Processing...";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-purple-400 to-purple-600 hover:bg-gradient-to-r from-purple-500 to-purple-700"; // Tailwind classes for thinking
  } else if (currentAudio && !isUserSpeaking) {
    aiActivityIndicator.textContent = state || "AI: Speaking";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-green-600 hover:bg-gradient-to-r from-green-500 to-green-700"; // Tailwind classes for speaking
  } else if (isUserSpeaking) {
    aiActivityIndicator.textContent = "AI: Listening";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-yellow-400 to-yellow-600 hover:bg-gradient-to-r from-yellow-500 to-yellow-700"; // Tailwind classes for listening
  } else {
    aiActivityIndicator.textContent = "AI: Idle";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
  }
};

// Initialize speech recognition (Chrome/WebKit prefixed implementation)
if ('webkitSpeechRecognition' in window) {
  speechRecognizer = new webkitSpeechRecognition();
  Object.assign(speechRecognizer, {
    continuous: true,
    interimResults: true,
    lang: 'en-US', // the Web Speech API property is `lang`, not `language`
    maxAlternatives: 3
  });
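
  // Note: this relies on the prefixed constructor alone. A more portable
  // feature check (an optional improvement, not part of the original code)
  // would be:
  //   const SR = window.SpeechRecognition || window.webkitSpeechRecognition;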

  speechRecognizer.onstart = () => {
    console.log("Speech recognition started");
    completeTranscript = '';
    isUserSpeaking = true;
    lastUserSpeechTimestamp = Date.now();
    updateActivityIndicators();
    startStopButton.innerHTML = `
      <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
        <path d="M9 9h6v6h-6z"></path>
        <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
        <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
        <line x1="12" y1="19" x2="12" y2="23"></line>
        <line x1="8" y1="23" x2="16" y2="23"></line>
      </svg>
      Stop Listening
    `;
  };

  speechRecognizer.onresult = (event) => {
    let interimTranscript = '';
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const transcript = event.results[i][0].transcript;
      if (event.results[i].isFinal) {
        completeTranscript += transcript;
        interruptAudioPlayback('final');
        processSpeechTranscript(completeTranscript);
        completeTranscript = '';
        isUserSpeaking = false;
        updateActivityIndicators();
        queryStartTime = Date.now();
      } else {
        interimTranscript += transcript;
        isUserSpeaking = true;
        lastUserSpeechTimestamp = Date.now();
        updateActivityIndicators();

        // Cancel stale prefetches once the interim transcript has grown
        // well past the last prefetched query
        if (interimTranscript.length > prefetchTextQuery.length + 5) {
          cancelPrefetchRequests(prefetchTextQuery);
        }
        prefetchTextQuery = interimTranscript;
        prefetchFirstAudioChunk(interimTranscript, voiceSelectionDropdown.value);
      }
    }
  };

  speechRecognizer.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
    if (isSpeechRecognitionActive) speechRecognizer.start();
  };

  speechRecognizer.onend = () => {
    isUserSpeaking = false;
    updateActivityIndicators();

    if (!isRequestInProgress && completeTranscript !== '') {
      processSpeechTranscript(completeTranscript);
      completeTranscript = '';
    }

    if (isSpeechRecognitionActive) speechRecognizer.start();
  };

  startStopButton.addEventListener('click', () => {
    if (isSpeechRecognitionActive && !isRequestInProgress) { // Stop Listening
      speechRecognizer.stop();
      isSpeechRecognitionActive = false;
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Start Listening
      `;
    } else if ((isSpeechRecognitionActive && isRequestInProgress) || currentAudio) { // Interrupt AI
      interruptAudioPlayback('button interrupt');
      speechRecognizer.start();
      isSpeechRecognitionActive = true; // Keep recognition active
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Interrupt AI
      `;
    } else { // Start Listening
      speechRecognizer.start();
      isSpeechRecognitionActive = true;
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M9 9h6v6h-6z"></path>
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Stop Listening
      `;
    }
  });
} else {
  alert('Your browser does not support the Web Speech API.');
}

// Add to conversation history, dropping an empty trailing assistant turn
const addToConversationHistory = (role, content) => {
  if (conversationHistory.length > 0 &&
      conversationHistory[conversationHistory.length - 1].role === 'assistant' &&
      conversationHistory[conversationHistory.length - 1].content === "") {
    conversationHistory.pop();
  }

  conversationHistory.push({ role, content });

  // Keep only the three most recent user/assistant exchanges
  if (conversationHistory.length > 6) conversationHistory.splice(0, 2);
};

// Process the final speech transcript
const processSpeechTranscript = (transcript) => {
  const trimmedTranscript = transcript.trimStart();
  if (trimmedTranscript !== '' && !isRequestInProgress) {
    activeQuery = trimmedTranscript;
    sendQueryToAI(activeQuery);
    addToConversationHistory('user', activeQuery);
    transcriptDiv.textContent = '';
  }
};

// Interrupt audio playback and abort any in-flight requests
const interruptAudioPlayback = (reason = 'unknown') => {
  console.log(`Interrupting audio (reason: ${reason})...`);
  if (currentAudio) {
    currentAudio.pause();
    currentAudio.currentTime = 0;
    currentAudio = null;
  }

  audioPlaybackQueue.length = 0;
  isRequestInProgress = false;

  if (requestAbortController) {
    requestAbortController.abort();
    requestAbortController = null;
  }

  prefetchCache.clear();
  prefetchQueue.length = 0;
  updateActivityIndicators();
};

// Cancel pending prefetch requests keyed on a stale query
const cancelPrefetchRequests = (query) => {
  const normalizedQuery = normalizeQueryText(query);

  for (const [cacheKey, abortController] of pendingPrefetchRequests) {
    if (cacheKey.startsWith(normalizedQuery)) {
      abortController.abort();
      pendingPrefetchRequests.delete(cacheKey);
    }
  }
};

// Update latency display (end of user speech to first response text)
const updateLatency = () => {
  if (firstResponseTextTimestamp) {
    const latency = firstResponseTextTimestamp - queryStartTime;
    responseTimeDisplay.textContent = `Latency: ${latency}ms`;
  } else {
    responseTimeDisplay.textContent = "Latency: 0ms";
  }
};

setInterval(updateLatency, 200);
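
// For context, a minimal sketch of the markup this script assumes. The IDs
// come from the getElementById calls at the top; the tag choices are
// illustrative, not taken from the actual page:
//
//   <button id="startStopButton">Start Listening</button>
//   <select id="voiceSelect"></select>
//   <select id="modelSelect"></select>
//   <input type="checkbox" id="noiseSuppression">
//   <span id="responseTime"></span>
//   <div id="userIndicator"></div>
//   <div id="aiIndicator"></div>
//   <div id="transcript"></div>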