# RagBenchCapstone10 / model.py
from transformers import pipeline


def load_model():
    """Loads the model from Hugging Face."""
    model = pipeline("text-generation", model="gpt2")  # Replace with your model
    return model


def generate_response(prompt):
    """Generates a response using the model."""
    model = load_model()
    response = model(prompt, max_length=100, do_sample=True)
    return response[0]["generated_text"]
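

# A minimal usage sketch (illustrative, not part of the original file). Note that
# generate_response() rebuilds the pipeline on every call; for repeated use you
# would typically call load_model() once and reuse the returned pipeline. The
# prompt string below is an arbitrary example.
if __name__ == "__main__":
    print(generate_response("Retrieval-augmented generation is"))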