# NLPToolkit — app.py
# Gradio chat application for common NLP tasks (summarization, sentiment
# analysis, text classification, entity recognition), backed by a Hugging Face
# hosted model via the InferenceClient.
import gradio as gr
from huggingface_hub import InferenceClient
import langdetect
import json
# Initialize Hugging Face client with the new model.
# NOTE(review): relies on ambient HF credentials/endpoint configuration —
# no token is passed explicitly here.
client = InferenceClient(model="OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5")

# Default system message to guide the assistant; shown (and editable) in the UI.
default_system_message = (
    "You are NLPToolkit Agent, an advanced assistant specializing in NLP tasks such as text summarization, "
    "sentiment analysis, text classification, and entity recognition. Adapt your responses to the selected task."
)

# Predefined task-specific instructions, appended to the system message in
# respond(). Keys must match the Dropdown choices in create_interface().
task_instructions = {
    "Summarization": "Summarize the text clearly and concisely.",
    "Sentiment Analysis": "Analyze the sentiment of the text (positive, neutral, negative).",
    "Text Classification": "Classify the text into relevant categories.",
    "Entity Recognition": "Identify and list named entities in the text."
}
# Preprocessing user input
def preprocess_text(text):
    """
    Clean and validate the user's input text.

    Returns the stripped text when it is non-empty English. Otherwise returns
    a human-readable error message; callers (see respond()) detect errors by
    checking the message prefix, so the prefixes below must not change.
    """
    # Strip first so surrounding whitespace cannot skew language detection,
    # and guard against None from an empty Gradio textbox.
    text = (text or "").strip()
    if not text:
        # langdetect raises on empty input; fail fast with the same message
        # the exception branch produces.
        return "Unable to detect language. Please provide valid text input."
    try:
        # Broad except kept deliberately: langdetect's exception types are
        # third-party and not imported here.
        language = langdetect.detect(text)
    except Exception:
        return "Unable to detect language. Please provide valid text input."
    if language != "en":
        return f"Input language detected as {language}. Please provide input in English."
    return text
# Respond function for handling user input and generating a response
def respond(task, message, history, system_message, max_tokens, temperature, top_p):
    """
    Stream an assistant response for *message* given the chat *history*.

    Parameters:
        task: one of the task_instructions keys (falls back to a generic
            instruction for unknown tasks).
        message: raw user input; validated via preprocess_text().
        history: list of (user_message, assistant_message) pairs.
        system_message: base system prompt from the UI.
        max_tokens / temperature / top_p: sampling parameters forwarded to
            the model.

    Yields progressively longer partial responses (the full text so far),
    or a single error/validation message.
    """
    # Append the task-specific instruction to the system prompt.
    system_message = f"{system_message} Task: {task_instructions.get(task, 'General NLP task')}"

    # Validate the input; preprocess_text signals failure via known prefixes.
    message = preprocess_text(message)
    if message.startswith("Input language detected") or message.startswith("Unable to detect"):
        yield message
        return

    # Rebuild the conversation in chat-completion format.
    messages = [{"role": "system", "content": system_message}]
    for user_message, assistant_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if assistant_message:
            messages.append({"role": "assistant", "content": assistant_message})
    messages.append({"role": "user", "content": message})

    response = ""
    # Stream response chunks from the Hugging Face model.
    try:
        for chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # delta.content can be None on role/finish chunks; guard against
            # a `str + None` TypeError mid-stream.
            token = chunk.choices[0].delta.content or ""
            response += token
            yield response
    except Exception as e:
        # Surface the failure in the chat window rather than crashing the UI.
        yield f"Error generating response: {str(e)}"
# Save conversation history to a JSON file
def save_history(history):
    """
    Persist the chat *history* to chat_history.json in the working directory.

    Returns a status message (the save button discards it, but it is useful
    when wired to an output component).
    """
    try:
        # Explicit UTF-8 + ensure_ascii=False keeps non-ASCII chat text
        # readable and independent of the platform's locale encoding.
        with open("chat_history.json", "w", encoding="utf-8") as f:
            json.dump(history, f, ensure_ascii=False)
        return "Chat history saved successfully."
    except OSError as e:
        # A disk/permission problem should not crash the UI callback.
        return f"Failed to save chat history: {e}"
# Load conversation history from a JSON file
def load_history():
    """
    Load chat history from chat_history.json.

    Returns the parsed history, or [] when the file is missing or corrupt
    (a truncated/invalid file must not crash the UI).
    """
    try:
        with open("chat_history.json", "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        return []
    except json.JSONDecodeError:
        # Corrupt or partially written file: start fresh instead of raising.
        return []
# Gradio app interface
def create_interface():
    """
    Create the Gradio interface for the chatbot.

    Returns a gr.Blocks app wiring the task selector, input boxes, sampling
    sliders, and the save/load/submit buttons to the module-level handlers.
    """
    with gr.Blocks() as demo:
        gr.Markdown("## 🧠 NLPToolkit Agent\nAn advanced assistant for NLP tasks, powered by Hugging Face.")
        with gr.Row():
            # Task selection dropdown; choices must match task_instructions keys.
            task = gr.Dropdown(
                choices=["Summarization", "Sentiment Analysis", "Text Classification", "Entity Recognition"],
                value="Summarization",
                label="Select NLP Task"
            )
        with gr.Row():
            # User input and the (editable) system message.
            user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            system_message = gr.Textbox(value=default_system_message, label="System Message")
        with gr.Row():
            # Chat history state and the streamed assistant response.
            # NOTE(review): respond() reads this State but nothing ever writes
            # back to it, so history stays empty across turns — confirm whether
            # multi-turn context is intended.
            chat_history = gr.State(value=[])
            assistant_response = gr.Textbox(label="Assistant Response", interactive=False)
        with gr.Row():
            # Sampling-parameter sliders forwarded to respond().
            max_tokens = gr.Slider(1, 2048, value=512, label="Max Tokens")
            temperature = gr.Slider(0.1, 4.0, value=0.7, label="Temperature")
            top_p = gr.Slider(0.1, 1.0, value=0.95, label="Top-p (Nucleus Sampling)")
        with gr.Row():
            # Buttons for save/load functionality.
            save_button = gr.Button("Save Chat History")
            load_button = gr.Button("Load Chat History")
        with gr.Row():
            # Submit button.
            submit_button = gr.Button("Generate Response")
        # Connect functionalities. respond() is a generator, so the response
        # textbox updates incrementally as tokens stream in.
        submit_button.click(
            fn=respond,
            inputs=[task, user_input, chat_history, system_message, max_tokens, temperature, top_p],
            outputs=assistant_response
        )
        # save_history's return value is discarded (outputs=None).
        save_button.click(fn=save_history, inputs=chat_history, outputs=None)
        load_button.click(fn=load_history, inputs=None, outputs=chat_history)
        gr.Markdown("### πŸš€ Powered by Hugging Face and Gradio | Developed by Canstralian")
    return demo
# Run the app
# Build the Gradio app and start serving it when executed as a script.
if __name__ == "__main__":
    create_interface().launch()