# Gilvan's picture
# Update app.py
# e62171b verified
# raw
# history blame
# 1.27 kB
import os
from huggingface_hub import login
import torch
from transformers import pipeline
import gradio as gr
# Authenticate with the Hugging Face Hub.
# The access token must be provided through the HF_TOKEN environment
# variable (required to download the gated Llama model below).
token = os.getenv("HF_TOKEN")
if token is None:
    raise ValueError("Hugging Face token is not set in the environment variable.")
login(token=token)

# Hub identifier of the checkpoint served by this app.
model_id = "meta-llama/Llama-3.2-1B-Instruct"
# Build a text-classification pipeline around the model.
# NOTE(review): meta-llama/Llama-3.2-1B-Instruct is a causal/instruct LM, not a
# sequence-classification checkpoint — loading it under "text-classification"
# attaches a freshly initialized (untrained) classification head, so
# predictions are effectively random unless the head was fine-tuned elsewhere.
# Confirm this model choice is intended.
pipe = pipeline(
    "text-classification",
    model=model_id,
    torch_dtype=torch.bfloat16,  # halve weight memory; needs bf16-capable hardware
    device_map="auto"            # let accelerate place layers on available devices
)
# Rename the head's output labels so results read as our three classes.
# NOTE(review): the default classification head has 2 labels; mapping three ids
# here assumes the head was created with num_labels=3 — verify, otherwise
# id 2 ('other') is never produced.
pipe.model.config.id2label = {0: 'greeting', 1: 'farewell', 2: 'other'}
def classify_text(text):
    """Run the classification pipeline on *text*.

    Returns the top-scoring label string (e.g. 'greeting').
    """
    predictions = pipe(text)
    top = predictions[0]
    return top['label']
# Assemble the Gradio UI: a single textbox in, a label widget out,
# wired to the classifier above.
demo = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter Text"),
    outputs=gr.Label(label="Classification"),
    title="Text Classifier",
    description="Classify your text as 'greeting', 'farewell', or 'other'.",
)

# Start the web server (blocks until the app is stopped).
demo.launch()