KingNish committed
Commit d939569
1 Parent(s): a740b16

Update script1.js

Files changed (1)
script1.js +12 -5
script1.js CHANGED
@@ -68,11 +68,14 @@ const processPrefetchQueue = async () => {
   const abortController = new AbortController();
   pendingPrefetchRequests.set(cacheKey, abortController);
 
+  const userSambanovaKey = document.getElementById('apiKey').value.trim() !== '' ? document.getElementById('apiKey').value : 'none';
+
   const url = '/stream_text';
   const requestBody = {
     query: query,
     history: JSON.stringify(conversationHistory),
-    model: modelSelectionDropdown.value
+    model: modelSelectionDropdown.value,
+    api_key: userSambanovaKey
   };
 
   try {
@@ -197,12 +200,13 @@ const sendQueryToAI = async (query) => {
   console.log("Sending query to AI:", query);
   isRequestInProgress = true;
   updateActivityIndicators();
-  queryStartTime = Date.now();
   firstResponseTextTimestamp = null;
 
   const normalizedQuery = normalizeQueryText(query);
   const cacheKey = generateCacheKey(normalizedQuery, modelSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value);
 
+  queryStartTime = Date.now();
+
   if (prefetchCache.has(cacheKey)) {
     const cachedData = prefetchCache.get(cacheKey);
     if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
@@ -216,11 +220,14 @@ const sendQueryToAI = async (query) => {
 
   requestAbortController = new AbortController();
 
+  const userSambanovaKey = document.getElementById('apiKey').value.trim() !== '' ? document.getElementById('apiKey').value : 'none';
+
   const url = '/stream_text';
   const requestBody = {
     query: query,
     history: JSON.stringify(conversationHistory),
-    model: modelSelectionDropdown.value
+    model: modelSelectionDropdown.value,
+    api_key: userSambanovaKey
   };
 
   try {
@@ -509,7 +516,7 @@ const cancelPrefetchRequests = (query) => {
   for (const [cacheKey, abortController] of pendingPrefetchRequests) {
     if (cacheKey.startsWith(normalizedQuery)) {
       abortController.abort();
-      pendingPrefetchRequests.delete(cacheKey);pendingPrefetchRequests.delete(cacheKey);
+      pendingPrefetchRequests.delete(cacheKey);
     }
   }
 };
@@ -524,4 +531,4 @@ const updateLatency = () => {
   }
 };
 
-setInterval(updateLatency, 200);
+setInterval(updateLatency, 100);
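
In short, the commit threads an optional SambaNova API key through both request paths (prefetch and live query), moves queryStartTime = Date.now() below the cache-key computation, removes a duplicated pendingPrefetchRequests.delete(cacheKey) call, and tightens latency polling from 200 ms to 100 ms. The sketch below illustrates the request-body pattern the diff introduces; buildStreamRequest is a hypothetical helper name (script1.js builds the object inline), and the fetch call is an assumption, since the diff does not show the body of the try blocks.

// Sketch only: the request-body pattern added in this commit.
// 'buildStreamRequest' is a hypothetical helper; script1.js builds the object inline.
const buildStreamRequest = (query, conversationHistory, modelSelectionDropdown) => {
  // Use the user-supplied key from the #apiKey input, or the literal 'none' as a fallback.
  const apiKeyValue = document.getElementById('apiKey').value;
  const userSambanovaKey = apiKeyValue.trim() !== '' ? apiKeyValue : 'none';

  return {
    query: query,
    history: JSON.stringify(conversationHistory),
    model: modelSelectionDropdown.value,
    api_key: userSambanovaKey
  };
};

// Assumed usage (the diff does not show the try blocks that perform the request):
// const response = await fetch('/stream_text', {
//   method: 'POST',
//   headers: { 'Content-Type': 'application/json' },
//   body: JSON.stringify(buildStreamRequest(query, conversationHistory, modelSelectionDropdown)),
//   signal: requestAbortController.signal
// });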