Update README.md
README.md CHANGED
@@ -27,7 +27,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 tokenizer = AutoTokenizer.from_pretrained("Simbolo-Servicio/Myanmarsar-GPT")
 model = AutoModelForCausalLM.from_pretrained("Simbolo-Servicio/Myanmarsar-GPT")
 
-input_text = "
+input_text = "ပညာရေး"
 input_ids = tokenizer.encode(input_text, return_tensors='pt')
 output = model.generate(input_ids, max_length=50)
 print(tokenizer.decode(output[0], skip_special_tokens=True))
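For reference, here is a minimal sketch of how the README usage snippet reads after this change, assuming the surrounding README keeps the `transformers` import shown in the hunk context; the prompt "ပညာရေး" is Burmese for "education":

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained("Simbolo-Servicio/Myanmarsar-GPT")
model = AutoModelForCausalLM.from_pretrained("Simbolo-Servicio/Myanmarsar-GPT")

# Burmese prompt ("education") introduced by this commit,
# replacing the previously truncated input_text line
input_text = "ပညာရေး"
input_ids = tokenizer.encode(input_text, return_tensors='pt')

# Generate up to 50 tokens and decode, dropping special tokens
output = model.generate(input_ids, max_length=50)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```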