Commit efd54c9 · Tuchuanhuhuhu
Parent: e99bd71

bugfix: corrupted history may exceed token limit fix #916

Files changed:
- ChuanhuChatbot.py            +6 -5
- modules/models/OpenAI.py     +1 -1
- modules/models/base_model.py +4 -0
- modules/utils.py             +0 -3
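In short: a saved conversation whose history list has drifted out of sync with the displayed chatbot could exceed the model's token limit on reload. The commit trims the loaded history to the chatbot's length, stops treating the streaming terminator "data: [DONE]" as an error, moves the logging configuration ahead of the application imports, and removes a duplicate wrapper in modules/utils.py.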
ChuanhuChatbot.py CHANGED

@@ -1,4 +1,10 @@
 # -*- coding:utf-8 -*-
+import logging
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s",
+)
+
 from modules.models.models import get_model
 from modules.train_func import *
 from modules.repo import *
@@ -10,11 +16,6 @@ from modules.config import *
 from modules import config
 import gradio as gr
 import colorama
-import logging
-logging.basicConfig(
-    level=logging.INFO,
-    format="%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s",
-)
 
 
 logging.getLogger("httpx").setLevel(logging.WARNING)
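The logging setup moves above the modules.* imports because logging.basicConfig() does nothing once the root logger already has handlers: if any imported module emits a log record at import time, Python installs a default handler first, and the custom format and INFO level here would silently never apply. A minimal standalone sketch of that failure mode:

import logging

# An imported module logs at import time; the first module-level logging
# call implicitly installs a default root handler (level WARNING).
logging.warning("some module logged during import")

# This basicConfig is now a silent no-op: the root logger already has a
# handler, so neither the INFO level nor the custom format takes effect.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s",
)

logging.info("dropped: the root logger is still at WARNING")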
modules/models/OpenAI.py CHANGED

@@ -217,7 +217,7 @@ class OpenAIClient(BaseLLMModel):
             except:
                 print(f"ERROR: {chunk}")
                 continue
-        if error_msg:
+        if error_msg and not error_msg=="data: [DONE]":
             raise Exception(error_msg)
 
     def set_key(self, new_access_key):
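The extra clause exists because OpenAI's streaming completions arrive as server-sent events: each chunk is a "data: ..." line, and the stream ends with the literal sentinel "data: [DONE]", which is not JSON. The failing JSON decode sent that line down the error path, so a perfectly normal stream ended by raising. A minimal sketch of the pattern, assuming the chunks are already decoded text lines (parse_sse_lines and the sample payload are illustrative, not the project's code):

import json

def parse_sse_lines(lines):
    """Yield decoded JSON events; collect non-JSON lines like the original loop."""
    error_msg = ""
    for raw in lines:
        if not raw.startswith("data: "):
            continue
        try:
            yield json.loads(raw[len("data: "):])
        except json.JSONDecodeError:
            error_msg += raw  # mirrors the error_msg accumulation in the diff
    # the terminator is expected; anything else is a genuine error
    if error_msg and not error_msg == "data: [DONE]":
        raise Exception(error_msg)

events = list(parse_sse_lines([
    'data: {"choices": [{"delta": {"content": "Hi"}}]}',
    "data: [DONE]",  # without the guard, this sentinel raised an Exception
]))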
modules/models/base_model.py CHANGED

@@ -747,6 +747,10 @@ class BaseLLMModel:
                 logging.info(new_history)
             except:
                 pass
+            if len(json_s["chatbot"]) < len(json_s["history"]):
+                logging.info("Trimming corrupted history...")
+                json_s["history"] = json_s["history"][-len(json_s["chatbot"]):]
+                logging.info(f"Trimmed history: {json_s['history']}")
             logging.debug(f"{self.user_identifier} 加载对话历史完毕")
             self.history = json_s["history"]
             return os.path.basename(self.history_file_path), json_s["system"], json_s["chatbot"]
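In this loader, json_s is the deserialized save file: json_s["chatbot"] holds the rows shown in the UI and json_s["history"] the saved messages (the Chinese debug line reads roughly "finished loading conversation history"). A corrupted save could leave history longer than the chatbot it belongs to, and reloading that surplus is what pushed requests past the token limit in #916; the new guard keeps only the most recent len(chatbot) history entries. A worked sketch with hypothetical data:

import logging

# Hypothetical corrupted save: two displayed rows, but five saved messages.
json_s = {
    "chatbot": [["hi", "hello"], ["how are you", "fine"]],
    "history": ["orphan", "hi", "hello", "how are you", "fine"],
}

if len(json_s["chatbot"]) < len(json_s["history"]):
    logging.info("Trimming corrupted history...")
    # keep only the most recent len(chatbot) == 2 entries
    json_s["history"] = json_s["history"][-len(json_s["chatbot"]):]

print(json_s["history"])  # ['how are you', 'fine']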
modules/utils.py CHANGED

@@ -77,9 +77,6 @@ def auto_name_chat_history(current_model, *args):
 def export_markdown(current_model, *args):
     return current_model.export_markdown(*args)
 
-def load_chat_history(current_model, *args):
-    return current_model.load_chat_history(*args)
-
 def upload_chat_history(current_model, *args):
     return current_model.load_chat_history(*args)
 
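The deleted load_chat_history wrapper was an exact duplicate of upload_chat_history, which already forwards to the same current_model.load_chat_history method; presumably any remaining call sites either use the surviving wrapper or call the method on the model directly.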