Update app.py
app.py CHANGED

@@ -68,7 +68,6 @@ class ModelManager:
             print(f"Error loading model {model_name}: {e}")
             self.models[model_name] = None
 
-
     def get_model(self, model_name):
         return self.models.get(model_name)
 
@@ -95,8 +94,12 @@ def remove_duplicates(text):
 
 @lru_cache(maxsize=128)
 def generate_model_response(model, inputs):
-
-
+    try:
+        response = model(inputs, max_tokens=150)
+        return remove_duplicates(response['choices'][0]['text'])
+    except Exception as e:
+        print(f"Error generating response from model: {e}")
+        return f"Error: Could not generate a response. Details: {e}"
 
 async def process_message(message):
     inputs = normalize_input(message)
@@ -112,8 +115,6 @@ async def process_message(message):
         model_name = global_data['model_configs'][i]['name']
         responses[model_name] = future.result()
 
-
-
     formatted_response = "\n\n".join([f"**{model}:**\n{response}" for model, response in responses.items()])
     response_cache[inputs] = formatted_response
     return formatted_response
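For reference, the helper as it stands after this commit can be sketched as below. The `model(inputs, max_tokens=150)` call and the `response['choices'][0]['text']` result shape follow the llama-cpp-python convention, which app.py appears to target; `remove_duplicates` is the helper defined earlier in the same file.

from functools import lru_cache

@lru_cache(maxsize=128)
def generate_model_response(model, inputs):
    try:
        # Run the model on the normalized input; call signature and the
        # 'choices'/'text' result shape assume a llama-cpp-python style model.
        response = model(inputs, max_tokens=150)
        # remove_duplicates is defined earlier in app.py.
        return remove_duplicates(response['choices'][0]['text'])
    except Exception as e:
        # Degrade to an error string instead of raising, so one failing
        # model does not break the combined response built in process_message.
        print(f"Error generating response from model: {e}")
        return f"Error: Could not generate a response. Details: {e}"

Since the function stays wrapped in `functools.lru_cache`, both arguments must be hashable, and results (including the error strings produced by the new except branch) are cached per (model, inputs) pair.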