from transformers import pipeline


def load_model():
    """Loads the model from Hugging Face."""
    model = pipeline("text-generation", model="gpt2")  # Replace with your model
    return model


def generate_response(prompt):
    """Generates a response using the model."""
    model = load_model()
    response = model(prompt, max_length=100, do_sample=True)
    return response[0]["generated_text"]
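
# A hedged alternative sketch (load_model_cached is a hypothetical name, not part
# of the original): generate_response() above rebuilds the pipeline on every call,
# so one option is to cache the loaded pipeline and reuse it across calls.
from functools import lru_cache


@lru_cache(maxsize=1)
def load_model_cached():
    """Loads the text-generation pipeline once and reuses it on later calls."""
    return pipeline("text-generation", model="gpt2")  # Same placeholder model as above


# Minimal usage example; the prompt string is a placeholder.
if __name__ == "__main__":
    print(generate_response("Once upon a time"))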