Commit 46a444c
Parent(s): 3304995

Update main.py
main.py CHANGED
@@ -6,8 +6,8 @@ from ctransformers import AutoModelForCausalLM
 from fastapi.responses import HTMLResponse
 from fastapi.middleware.cors import CORSMiddleware
 from sse_starlette.sse import EventSourceResponse
-from pydantic import BaseModel
-
+from pydantic import BaseModel, Field
+from typing_extensions import Literal
 from dialogue import DialogueTemplate
 
 llm = AutoModelForCausalLM.from_pretrained("NeoDim/starchat-alpha-GGML",
@@ -116,12 +116,14 @@ async def chat(prompt = "<|user|> Write an express server with server sent event
     return EventSourceResponse(server_sent_events(tokens, llm))
 
 
-class
-    role:
-
+class ChatCompletionRequestMessage(BaseModel):
+    role: Literal["system", "user", "assistant"] = Field(
+        default="user", description="The role of the message."
+    )
+    content: str = Field(default="", description="The content of the message.")
 
 class ChatCompletionRequest(BaseModel):
-    messages: List[
+    messages: List[ChatCompletionRequestMessage]
 
 system_message = "Below is a conversation between a human user and a helpful AI coding assistant."
 
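The commit only shows the model definitions, so the following is a minimal, self-contained sketch of how the new request schema validates an OpenAI-style chat payload. The standalone imports, the example payload, and the variable names are assumptions for illustration; they are not part of main.py.

# Sketch: validate a chat request with the models added in this commit,
# assuming Pydantic-style Field/validation and typing_extensions.Literal.
from typing import List

from pydantic import BaseModel, Field
from typing_extensions import Literal


class ChatCompletionRequestMessage(BaseModel):
    role: Literal["system", "user", "assistant"] = Field(
        default="user", description="The role of the message."
    )
    content: str = Field(default="", description="The content of the message.")


class ChatCompletionRequest(BaseModel):
    messages: List[ChatCompletionRequestMessage]


# Illustrative payload in the chat-completions shape; pydantic coerces the
# nested dicts into ChatCompletionRequestMessage instances on validation.
payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful AI coding assistant."},
        {"role": "user", "content": "Write an express server with server sent events."},
    ]
}

request = ChatCompletionRequest(**payload)
print(request.messages[0].role)      # "system"
print(request.messages[1].content)   # "Write an express server with server sent events."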