import gradio as gr
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer

# Load the fine-tuned model and tokenizer.
# Alternative (unsloth): model, tokenizer = FastLanguageModel.from_pretrained("lora_model")
base_model = AutoModelForCausalLM.from_pretrained(
    "unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit",
    device_map="auto",
)
model = PeftModel.from_pretrained(base_model, "DarkAngel/gitallama")
tokenizer = AutoTokenizer.from_pretrained("unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit")
def generate_response(shloka, transliteration):
    """
    Generates the response using the fine-tuned LLaMA model.
    """
    input_message = [
        {
            "role": "user",
            "content": f"Shloka: {shloka} Transliteration: {transliteration}"
        }
    ]
    inputs = tokenizer.apply_chat_template(
        input_message,
        tokenize=True,
        add_generation_prompt=True,
        return_tensors="pt"
    ).to(model.device)

    # Generate the response, streaming new tokens to stdout as they are produced
    text_streamer = TextStreamer(tokenizer, skip_prompt=True)
    generated_tokens = model.generate(
        input_ids=inputs,
        streamer=text_streamer,
        max_new_tokens=512,
        use_cache=True,
        do_sample=True,
        temperature=1.5,
        min_p=0.1
    )

    # Decode only the newly generated tokens, skipping the prompt portion
    raw_response = tokenizer.decode(
        generated_tokens[0][inputs.shape[1]:], skip_special_tokens=True
    )

    # Split the output into English, Hindi, and word-meaning sections;
    # fall back to the raw text if the expected headings are missing.
    try:
        sections = raw_response.split("Hindi Meaning:")
        english_meaning = sections[0].strip()
        hindi_and_word = sections[1].split("Word Meaning:")
        hindi_meaning = hindi_and_word[0].strip()
        word_meaning = hindi_and_word[1].strip()
        formatted_response = (
            f"English Meaning:\n{english_meaning}\n\n"
            f"Hindi Meaning:\n{hindi_meaning}\n\n"
            f"Word Meaning:\n{word_meaning}"
        )
    except IndexError:
        formatted_response = raw_response
    return formatted_response
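# Example (hypothetical values) of calling generate_response directly,
# outside the Gradio UI, e.g. as a quick smoke test from a Python shell:
#
#   print(generate_response(
#       "कर्मण्येवाधिकारस्ते मा फलेषु कदाचन",
#       "karmany evadhikaras te ma phaleshu kadachana"
#   ))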
interface = gr.Interface(
    fn=generate_response,
    inputs=[
        gr.Textbox(label="Enter Shloka", placeholder="Type or paste a Shloka here"),
        gr.Textbox(label="Enter Transliteration", placeholder="Type or paste the transliteration here")
    ],
    outputs=gr.Textbox(label="Generated Response"),
    title="Bhagavad Gita LLaMA Model",
    description="Input a Shloka with its transliteration, and this model will provide meanings in English and Hindi along with word meanings."
)

# Launch the interface
if __name__ == "__main__":
    interface.launch()