MarioBarbeque committed on
Commit 86dd4bc · verified · 1 Parent(s): 5757a29

update tokenizer in example

Files changed (1)
README.md +1 -1
README.md CHANGED
@@ -71,7 +71,7 @@ import torch
 from transformers import T5ForConditionalGeneration, T5Tokenizer
 
 model = T5ForConditionalGeneration.from_pretrained("MarioBarbeque/CyberSolve-LinAlg-1.2").to("cuda")
-tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large") # CyberSolve uses the same tokenizer as the base FLAN-T5 model
+tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large") # CyberSolve uses the same tokenizer as the base FLAN-T5 model
 
 # Pass the model instruction to solve a linear equation in the following simple format
 input_text = "Solve 24 = 1601*c - 1605*c for c."
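
For reference, a minimal sketch of how the corrected snippet could be completed end to end. The tokenize, generate, and decode steps below are standard transformers usage and are an assumption; they are not part of this diff and may differ from the rest of the README.

import torch
from transformers import T5ForConditionalGeneration, T5Tokenizer

model = T5ForConditionalGeneration.from_pretrained("MarioBarbeque/CyberSolve-LinAlg-1.2").to("cuda")
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")  # CyberSolve uses the same tokenizer as the base FLAN-T5 model

# Pass the model an instruction to solve a linear equation in the following simple format
input_text = "Solve 24 = 1601*c - 1605*c for c."

# Tokenize the instruction and move the input IDs to the same device as the model
input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to("cuda")

# Generate and decode the answer (default generation settings; an assumption, not taken from the commit)
outputs = model.generate(input_ids)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))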