# chatbot.py
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

# Load the pretrained Blenderbot tokenizer and model from the Hugging Face Hub.
tokenizer = BlenderbotTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = BlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")

def generate_response(user_input, chat_history=None):
    # Default to None to avoid Python's mutable-default-argument pitfall.
    if chat_history is None:
        chat_history = []
    inputs = tokenizer([user_input], return_tensors='pt')
    # Generate a reply; the history is tracked but not fed back to the model here.
    chat_history_ids = model.generate(**inputs)
    bot_response = tokenizer.decode(chat_history_ids[0], skip_special_tokens=True)
    chat_history.append({"role": "assistant", "content": bot_response})
    return bot_response, chat_history
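

# A minimal usage sketch (hypothetical REPL loop, not part of the original module):
# each turn passes the history returned by the previous call back in.
if __name__ == "__main__":
    history = []
    while True:
        user_text = input("You: ")
        if user_text.strip().lower() in {"quit", "exit"}:
            break
        reply, history = generate_response(user_text, history)
        print(f"Bot: {reply}")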