from transformers import GPT2Tokenizer, GPT2LMHeadModel

# Load the tokenizer and the language-modeling variant of GPT-2.
# (GPT2Model is the bare transformer without an LM head, so it cannot
# generate text; GPT2LMHeadModel is the class that supports generate().)
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = GPT2LMHeadModel.from_pretrained('gpt2')
# Create a prompt
prompt = "I need a hug."
# Tokenize the prompt
inputs = tokenizer.encode(prompt, return_tensors='pt')
# Generate text using the model
outputs = model.generate(
    inputs,
    max_length=50,
    num_return_sequences=1,
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; reuse EOS to avoid the warning
)
# Decode the generated text
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
# Print the generated text
print(generated_text)
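
# --- Optional: sampled decoding. A minimal sketch reusing the tokenizer,
# model, and inputs loaded above. generate() defaults to greedy decoding,
# which tends to loop and repeat with GPT-2; enabling do_sample with
# top_k/top_p usually yields more varied continuations. The parameter
# values below are illustrative, not tuned.
sampled = model.generate(
    inputs,
    max_length=50,
    do_sample=True,   # sample from the next-token distribution instead of taking the argmax
    top_k=50,         # restrict sampling to the 50 most likely tokens
    top_p=0.95,       # nucleus sampling: smallest token set covering 95% of the probability mass
    pad_token_id=tokenizer.eos_token_id,
)
print(tokenizer.decode(sampled[0], skip_special_tokens=True))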