# Provenance (scraped from the Hugging Face Space file viewer):
# author: YaserDS-777 — "Update app.py" — commit cdc1fda (verified), 829 Bytes
import os
import gradio as gr
from transformers import pipeline
# Read the Hugging Face API token from the environment; required to access
# gated repositories such as the Llama model family.
api_token = os.getenv("HUGGINGFACE_API_TOKEN")

# Initialize the text-generation pipeline with authentication.
# `token=` replaces the deprecated `use_auth_token=` keyword (deprecated in
# transformers 4.32, removed in v5).
# NOTE(review): a 405B-parameter model cannot realistically load in a standard
# Space; confirm the intended checkpoint (e.g. an -Instruct or smaller variant).
pipe = pipeline(
    "text-generation",
    model="meta-llama/Meta-Llama-3.1-405B",
    token=api_token,
)
def generate_text(prompt):
    """Generate a short continuation of *prompt* using the module-level pipeline.

    Parameters
    ----------
    prompt : str
        Text to continue.

    Returns
    -------
    str
        The first generated sequence. The pipeline's output includes the
        prompt followed by the completion.
    """
    # `max_new_tokens` bounds only the *generated* tokens. The original
    # `max_length=50` counted the prompt as well, so prompts of ~50+ tokens
    # produced truncated or empty completions.
    result = pipe(prompt, max_new_tokens=50, num_return_sequences=1)
    return result[0]['generated_text']
# Create the Gradio interface.
# `gr.inputs.Textbox` was deprecated in Gradio 3.x and removed in Gradio 4
# (raises AttributeError there); components now live at the top level.
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
    outputs="text",
    title="Meta-Llama Text Generation",
    description="Generate text using the Meta-Llama 3.1 405B model.",
)

# Launch the interface (blocking call; serves the web UI).
iface.launch()