Update app.py
app.py CHANGED
@@ -7,16 +7,17 @@ from langchain_huggingface import HuggingFaceEndpoint
 # Initialize the LLM and other components
 llm = HuggingFaceEndpoint(
     repo_id="mistralai/Mistral-7B-Instruct-v0.3",
-    task="text-
+    task="text-generation",
     max_new_tokens=64,
     temperature=0.5,
     do_sample=False,
 )
 # Define the function to process user input
 def classify_text(text):
-    prompt = f"""
+    prompt = f"""Your mission is to classify the following text into a category or topic.
     Text: {text.strip()}
-
+    You dont need to write specific informations or explanations, only return the categories.
+    """
 
     # Invoke the model with the refined prompt
     results = llm.invoke(prompt).strip()
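For context, a minimal sketch of what app.py could look like after this change. Only the `HuggingFaceEndpoint` configuration and the body of `classify_text` come from the commit; the `return` statement, the Gradio `Interface` wiring, and the availability of a Hugging Face token (e.g. the Space's `HF_TOKEN` secret) are assumptions and are marked as such in the comments.

```python
# Minimal sketch of app.py after this commit -- for illustration only.
# The Gradio wiring and the return statement are assumptions; the diff only
# shows the HuggingFaceEndpoint setup and the new prompt in classify_text.
# A Hugging Face token must be available for the endpoint call to succeed.
import gradio as gr
from langchain_huggingface import HuggingFaceEndpoint

# Initialize the LLM and other components
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    task="text-generation",
    max_new_tokens=64,
    temperature=0.5,
    do_sample=False,
)

# Define the function to process user input
def classify_text(text):
    prompt = f"""Your mission is to classify the following text into a category or topic.
    Text: {text.strip()}
    You dont need to write specific informations or explanations, only return the categories.
    """

    # Invoke the model with the refined prompt
    results = llm.invoke(prompt).strip()
    return results  # assumed: the hunk above ends before the function returns

# Hypothetical UI wiring -- not part of the diff shown above.
demo = gr.Interface(fn=classify_text, inputs="text", outputs="text",
                    title="Text classifier")

if __name__ == "__main__":
    demo.launch()
```

Note that with `do_sample=False` the endpoint should decode greedily, so the returned categories for a given input are reproducible and the `temperature=0.5` setting has little practical effect.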