Tuchuanhuhuhu committed
Commit · b91e1d8
1 parent: c87878a
Stop auto-summarizing the conversation; delete early exchanges instead (不再自动总结对话,而是删除早期对话)
Files changed: modules/chat_func.py (+7 -15)
modules/chat_func.py CHANGED

@@ -406,23 +406,15 @@ def predict(
     max_token = MODEL_SOFT_TOKEN_LIMIT[selected_model]["all"]

     if sum(all_token_counts) > max_token and should_check_token_count:
-        (one deleted line, content lost in the page extraction)
+        print(all_token_counts)
+        count = 0
+        while sum(all_token_counts) > max_token - 500 and sum(all_token_counts) > 0:
+            count += 1
+            del all_token_counts[0]
+            del history[:2]
         logging.info(status_text)
+        status_text = f"为了防止token超限,模型忘记了早期的 {count} 轮对话"
         yield chatbot, history, status_text, all_token_counts
-        iter = reduce_token_size(
-            openai_api_key,
-            system_prompt,
-            history,
-            chatbot,
-            all_token_counts,
-            top_p,
-            temperature,
-            max_token//2,
-            selected_model=selected_model,
-        )
-        for chatbot, history, status_text, all_token_counts in iter:
-            status_text = f"Token 达到上限,已自动降低Token计数至 {status_text}"
-            yield chatbot, history, status_text, all_token_counts


 def retry(
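In short, the commit swaps the old reduce_token_size path, which made another API call to summarize the conversation, for a plain sliding window: once the running total in all_token_counts crosses the model's soft limit, the oldest exchanges are deleted until the total fits under max_token - 500, and the added status line (为了防止token超限,模型忘记了早期的 {count} 轮对话, roughly "to avoid exceeding the token limit, the model forgot the earliest {count} rounds of conversation") reports how many rounds were dropped. Below is a minimal, self-contained sketch of that loop, assuming history alternates user/assistant messages (so one exchange is two entries) and that all_token_counts holds one total per exchange; the helper name forget_early_exchanges and the numbers are illustrative, not from the repository.

```python
def forget_early_exchanges(history, token_counts, max_token, headroom=500):
    """Drop the oldest exchanges until the total fits under max_token - headroom,
    mirroring the while-loop this commit adds to predict()."""
    forgotten = 0
    while sum(token_counts) > max_token - headroom and sum(token_counts) > 0:
        forgotten += 1
        del token_counts[0]  # one token total per exchange (assumed shape)
        del history[:2]      # user message + assistant reply
    return forgotten


# Example: a 4,000-token soft limit with three exchanges on the books.
history = ["u1", "a1", "u2", "a2", "u3", "a3"]
token_counts = [1800, 1500, 1200]
n = forget_early_exchanges(history, token_counts, max_token=4000)
print(n, token_counts, history)
# -> 1 [1500, 1200] ['u2', 'a2', 'u3', 'a3']
```

Compared with the old summarization route, this spends no extra API tokens and behaves deterministically, at the price of losing the dropped turns outright rather than keeping a compressed version of them.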