Spaces:
Sleeping
Sleeping
Tuchuanhuhuhu
committed on
Commit
·
a8d9818
1
Parent(s):
e17e77b
fix [功能请求]: GPT-4 Turbo模型的Token长度限制似乎有问题? #953
Browse files
modules/models/OpenAI.py
CHANGED
@@ -25,8 +25,6 @@ class OpenAIClient(BaseLLMModel):
|
|
25 |
top_p=1.0,
|
26 |
user_name=""
|
27 |
) -> None:
|
28 |
-
if model_name in MODEL_METADATA:
|
29 |
-
model_name = MODEL_METADATA[model_name]["model_name"]
|
30 |
super().__init__(
|
31 |
model_name=model_name,
|
32 |
temperature=temperature,
|
|
|
25 |
top_p=1.0,
|
26 |
user_name=""
|
27 |
) -> None:
|
|
|
|
|
28 |
super().__init__(
|
29 |
model_name=model_name,
|
30 |
temperature=temperature,
|
modules/models/OpenAIVision.py
CHANGED
@@ -32,7 +32,7 @@ class OpenAIVisionClient(BaseLLMModel):
|
|
32 |
user_name=""
|
33 |
) -> None:
|
34 |
super().__init__(
|
35 |
-
model_name=
|
36 |
temperature=temperature,
|
37 |
top_p=top_p,
|
38 |
system_prompt=system_prompt,
|
|
|
32 |
user_name=""
|
33 |
) -> None:
|
34 |
super().__init__(
|
35 |
+
model_name=model_name,
|
36 |
temperature=temperature,
|
37 |
top_p=top_p,
|
38 |
system_prompt=system_prompt,
|
modules/models/base_model.py
CHANGED
@@ -213,7 +213,10 @@ class BaseLLMModel:
|
|
213 |
) -> None:
|
214 |
self.history = []
|
215 |
self.all_token_counts = []
|
216 |
-
|
|
|
|
|
|
|
217 |
self.model_type = ModelType.get_type(model_name)
|
218 |
try:
|
219 |
self.token_upper_limit = MODEL_METADATA[model_name]["token_limit"]
|
|
|
213 |
) -> None:
|
214 |
self.history = []
|
215 |
self.all_token_counts = []
|
216 |
+
if model_name in MODEL_METADATA:
|
217 |
+
self.model_name = MODEL_METADATA[model_name]["model_name"]
|
218 |
+
else:
|
219 |
+
self.model_name = model_name
|
220 |
self.model_type = ModelType.get_type(model_name)
|
221 |
try:
|
222 |
self.token_upper_limit = MODEL_METADATA[model_name]["token_limit"]
|