Update app.py
app.py CHANGED
@@ -20,7 +20,7 @@ Llama_pipe = pipeline(
     "text-generation",
     model=model,
     tokenizer=tokenizer,
-    max_new_tokens=
+    max_new_tokens=40,
    do_sample=True,
    temperature=0.7,
    top_p=0.95,
@@ -34,7 +34,7 @@ Rally: Sure.
 User: I'm hungry right now. Do you know any Vietnamese food?"""
 
 prompt_template = f"""<|im_start|>system
-
+Write one sentence to continue the conversation<|im_end|>
 {history}
 Rally:"""
 print(Llama_pipe(prompt_template)[0]['generated_text'])
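For context, here is a minimal sketch of what app.py could look like around these two hunks after the change. Only the pipeline arguments, the final user turn, and the prompt template come from the diff; the model name (TinyLlama/TinyLlama-1.1B-Chat-v1.0 is a placeholder for a ChatML-style chat model), the model/tokenizer loading, and the earlier turns of `history` are assumptions, since they are not part of the hunks shown.

# Minimal sketch reconstructing the surrounding app.py context after this commit.
# The model id, the loading code, and the earlier dialogue turns are assumptions;
# only the pipeline arguments and the prompt template are taken from the diff.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"  # assumed ChatML-style model, not shown in the hunks
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

Llama_pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=40,   # cap added by this commit: keeps Rally's reply short
    do_sample=True,
    temperature=0.7,
    top_p=0.95,
)

# Running dialogue; the last user turn is from the diff, the earlier turns are assumed.
history = """User: Hi Rally, can you help me?
Rally: Sure.
User: I'm hungry right now. Do you know any Vietnamese food?"""

# ChatML-style prompt: the new system line asks for a single-sentence continuation.
prompt_template = f"""<|im_start|>system
Write one sentence to continue the conversation<|im_end|>
{history}
Rally:"""

print(Llama_pipe(prompt_template)[0]['generated_text'])

Taken together, capping max_new_tokens at 40 and adding the one-sentence system instruction keeps Rally's generated reply short instead of letting the model continue the dialogue open-endedly.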