Update README.md
README.md
@@ -19,12 +19,16 @@ Wenzhong-GPT2-110M is one of the Wenzhong series, which has smaller parameters.
 ## Usage
 
 ### load model
+
 ```python
 from transformers import GPT2Tokenizer,GPT2LMHeadModel
 hf_model_path = 'IDEA-CCNL/Wenzhong-GPT2-110M'
 tokenizer = GPT2Tokenizer.from_pretrained(hf_model_path)
-model = GPT2LMHeadModel.from_pretrained(hf_model_path)
+model = GPT2LMHeadModel.from_pretrained(hf_model_path)
+```
+
 ### generation
+
 ```python
 question = "北京是中国的"
 inputs = tokenizer(question,return_tensors='pt')
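The hunk ends at the tokenizer call, so the rest of the README's generation snippet is not shown in this commit. Below is a minimal sketch of how the generation step typically continues with the `transformers` `generate` API; the sampling settings (`max_new_tokens`, `do_sample`, `top_p`, `pad_token_id`) are illustrative assumptions, not values taken from this model card.

```python
# Continuation sketch (not part of the commit): generate a completion
# with the model, tokenizer, and prompt defined in the README snippet.
from transformers import GPT2Tokenizer, GPT2LMHeadModel

hf_model_path = 'IDEA-CCNL/Wenzhong-GPT2-110M'
tokenizer = GPT2Tokenizer.from_pretrained(hf_model_path)
model = GPT2LMHeadModel.from_pretrained(hf_model_path)

question = "北京是中国的"
inputs = tokenizer(question, return_tensors='pt')

# Sampling parameters below are assumptions for illustration only.
generation_output = model.generate(
    **inputs,
    max_new_tokens=32,                     # cap the length of the continuation
    do_sample=True,                        # sample instead of greedy decoding
    top_p=0.9,                             # nucleus sampling
    pad_token_id=tokenizer.eos_token_id,   # GPT-2 has no pad token by default
)

# Decode the full sequence (prompt plus generated continuation).
print(tokenizer.decode(generation_output[0], skip_special_tokens=True))
```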