Spaces:
Runtime error
Runtime error
KvrParaskevi
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -38,7 +38,7 @@ def chat_interface(inputs):
|
|
38 |
|
39 |
#result = llm_chain({"input": query, "history": chat_history_tuples})
|
40 |
result = llm_chain.invoke(input = inputs, max_new_tokens = 50)
|
41 |
-
return result["
|
42 |
|
43 |
llm = load_pipeline()
|
44 |
chat_history = []
|
@@ -75,7 +75,7 @@ with gr.Blocks() as demo:
|
|
75 |
#message = gr.Textbox(label="Ask me a question!")
|
76 |
#clear = gr.Button("Clear")
|
77 |
#llm_chain, llm = init_chain(model, tokenizer)
|
78 |
-
chatbot_component = gr.Chatbot(height=300)
|
79 |
textbox_component = gr.Textbox(placeholder="Can I help you to book a hotel?", container=False, label = "input", scale=7)
|
80 |
|
81 |
demo.chatbot_interface = gr.Interface(
|
|
|
38 |
|
39 |
#result = llm_chain({"input": query, "history": chat_history_tuples})
|
40 |
result = llm_chain.invoke(input = inputs, max_new_tokens = 50)
|
41 |
+
return result["answer"]
|
42 |
|
43 |
llm = load_pipeline()
|
44 |
chat_history = []
|
|
|
75 |
#message = gr.Textbox(label="Ask me a question!")
|
76 |
#clear = gr.Button("Clear")
|
77 |
#llm_chain, llm = init_chain(model, tokenizer)
|
78 |
+
chatbot_component = gr.Chatbot(height=300, label = "history")
|
79 |
textbox_component = gr.Textbox(placeholder="Can I help you to book a hotel?", container=False, label = "input", scale=7)
|
80 |
|
81 |
demo.chatbot_interface = gr.Interface(
|