import gradio as gr
from transformers import pipeline, AutoModelForQuestionAnswering, AutoTokenizer
import json

# Load the dataset
with open('faq_dataset.json') as f:
    faq_data = json.load(f)
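
# Assumed shape of faq_dataset.json (not shown here): a list of records whose
# 'answer' field is used as QA context below. Only 'answer' is read by this
# script; the 'question' key in this sketch is illustrative, e.g.:
# [
#     {"question": "How do I reset my password?",
#      "answer": "Open Settings, choose Account, then click 'Reset password'."},
#     ...
# ]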

# Initialize the model and tokenizer
model_name = "distilbert-base-uncased-distilled-squad"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForQuestionAnswering.from_pretrained(model_name)
nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)

# Run extractive QA against each FAQ answer and return the first span the
# pipeline scores above 0.5; otherwise fall back to a default reply.
def get_answer(question):
    for item in faq_data:
        context = item['answer']
        result = nlp(question=question, context=context)
        if result['score'] > 0.5:
            return result['answer']
    return "Sorry, I don't know the answer to that question."

# Create the Gradio interface
iface = gr.Interface(
    fn=get_answer,
    inputs=gr.Textbox(label="Ask a Question"),
    outputs=gr.Textbox(label="Answer"),
    title="FAQ Chatbot",
    description="Ask a question and get an answer from the FAQ dataset."
)

# Launch the interface
iface.launch()