```python
from modeling_deltalm import DeltalmForConditionalGeneration # modeling_deltalm: https://huggingface.co./nguyenvulebinh/deltalm-base/blob/main/modeling_deltalm.py
from configuration_deltalm import DeltalmConfig # configuration_deltalm: https://huggingface.co./nguyenvulebinh/deltalm-base/blob/main/configuration_deltalm.py
from transformers import AutoTokenizer
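
# The snippet uses `tokenizer` and `model` below without defining them; the two
# lines here load them from the repository linked in the import comments above
# (checkpoint name "nguyenvulebinh/deltalm-base" assumed from those URLs).
tokenizer = AutoTokenizer.from_pretrained("nguyenvulebinh/deltalm-base")
model = DeltalmForConditionalGeneration.from_pretrained("nguyenvulebinh/deltalm-base")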
src_text = "i'm steve and<mask> 25 years old"
encoded_hi = tokenizer(src_text, return_tensors="pt")
generated_output = model.generate(**encoded_hi, forced_bos_token_id=tokenizer.bos_token_id, max_length=20, num_beams=1, return_dict_in_generate=True, return_dict=True, output_hidden_states=True)
text_output = tokenizer.batch_decode(generated_output.sequences, skip_special_tokens=True)
print(text_output)
```
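
Because the `generate` call sets `return_dict_in_generate=True` and `output_hidden_states=True`, the returned object exposes the encoder and decoder hidden states alongside `sequences`. Below is a minimal sketch of how to inspect them, assuming the standard `transformers` encoder-decoder generate output (the field names `encoder_hidden_states` and `decoder_hidden_states` come from that API, not from this repository):

```python
# encoder_hidden_states: one tensor per encoder layer (plus the embedding layer),
# each of shape (batch_size, source_length, hidden_size)
enc_states = generated_output.encoder_hidden_states
print(len(enc_states), enc_states[-1].shape)

# decoder_hidden_states: one tuple per generated token, each holding
# one tensor per decoder layer (plus the embedding layer)
dec_states = generated_output.decoder_hidden_states
print(len(dec_states), dec_states[0][-1].shape)
```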