Spaces:
Sleeping
Sleeping
Kongongong
committed on
Commit
•
9f71d7b
1
Parent(s):
9c39107
Update app/main.py
Browse files- app/main.py +2 -2
app/main.py
CHANGED
@@ -53,7 +53,7 @@ print("Ready.")
|
|
53 |
app = FastAPI(
|
54 |
title = "WLlma Thai Physics Finetuned API",
|
55 |
description="Llama For Physics Task Finetuned API for Thai Open-ended question answering.",
|
56 |
-
version="1.0.
|
57 |
)
|
58 |
|
59 |
origins = ["*"]
|
@@ -93,7 +93,7 @@ async def ask_Wphys(
|
|
93 |
if prompt:
|
94 |
try:
|
95 |
print(f'Asking Wllama-phys-8b with the question "{prompt}"')
|
96 |
-
result = ask_llm(
|
97 |
print(f"Result: {result}")
|
98 |
return QuestionResponse(answer=result, question=prompt, config={"temperature": temperature, "max_new_tokens": max_new_tokens})
|
99 |
except Exception as e:
|
|
|
53 |
app = FastAPI(
|
54 |
title = "WLlma Thai Physics Finetuned API",
|
55 |
description="Llama For Physics Task Finetuned API for Thai Open-ended question answering.",
|
56 |
+
version="1.0.1",
|
57 |
)
|
58 |
|
59 |
origins = ["*"]
|
|
|
93 |
if prompt:
|
94 |
try:
|
95 |
print(f'Asking Wllama-phys-8b with the question "{prompt}"')
|
96 |
+
result = ask_llm(prompt, max_new_tokens=max_new_tokens, temperature=temperature)
|
97 |
print(f"Result: {result}")
|
98 |
return QuestionResponse(answer=result, question=prompt, config={"temperature": temperature, "max_new_tokens": max_new_tokens})
|
99 |
except Exception as e:
|