matthoffner committed
Commit e177246
Parent(s): b11f460
Update main.py
main.py CHANGED
@@ -69,23 +69,7 @@ async def chat(request: ChatCompletionRequest):
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
 
-
-    for chat_chunk in chat_chunks:
-        response = {
-            'choices': [
-                {
-                    'message': {
-                        'role': 'system',
-                        'content': llm.detokenize(chat_chunk)
-                    },
-                    'finish_reason': 'stop' if llm.detokenize(chat_chunk) == "[DONE]" else 'unknown'
-                }
-            ]
-        }
-        yield f"data: {json.dumps(response)}\n\n"
-    yield "event: done\ndata: {}\n\n"
-
-    return StreamingResponse(format_response(chat_chunks), media_type="text/event-stream")
+    return StreamingResponse(generate_response(chat_chunks, llm), media_type="text/event-stream")
 
 async def stream_response(tokens: Any) -> None:
     try:
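The hunk replaces the inline SSE formatting loop with a single call to generate_response(chat_chunks, llm), whose definition is not part of this diff. A minimal sketch of what such a helper could look like, assuming it simply mirrors the loop removed above; the helper name and arguments come from the added line, while the body shown here is an assumption, not the repository's actual implementation:

import json
from typing import Any, Iterator


def generate_response(chat_chunks: Any, llm: Any) -> Iterator[str]:
    # Hypothetical sketch: format each detokenized chunk as a Server-Sent
    # Events "data:" frame, mirroring the loop removed in this commit.
    for chat_chunk in chat_chunks:
        response = {
            'choices': [
                {
                    'message': {
                        'role': 'system',
                        'content': llm.detokenize(chat_chunk)
                    },
                    'finish_reason': 'stop' if llm.detokenize(chat_chunk) == "[DONE]" else 'unknown'
                }
            ]
        }
        yield f"data: {json.dumps(response)}\n\n"
    # Signal the end of the stream once all chunks have been sent.
    yield "event: done\ndata: {}\n\n"

FastAPI's StreamingResponse iterates whatever generator it is given and writes each yielded string to the client, so with media_type="text/event-stream" each yield above arrives as one SSE frame.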