Update to deepseek-coder-7b-base-v1.5 in code
README.md CHANGED
````diff
@@ -25,8 +25,8 @@ Here give an example of how to use our model.
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
-tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/deepseek-coder-
-model = AutoModelForCausalLM.from_pretrained("deepseek-ai/deepseek-coder-
+tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/deepseek-coder-7b-base-v1.5", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("deepseek-ai/deepseek-coder-7b-base-v1.5", trust_remote_code=True).cuda()
 input_text = "#write a quick sort algorithm"
 inputs = tokenizer(input_text, return_tensors="pt").cuda()
 outputs = model.generate(**inputs, max_length=128)
````
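For reference, a runnable sketch of the updated snippet end to end, assuming a CUDA device and the `deepseek-ai/deepseek-coder-7b-base-v1.5` checkpoint. The `torch_dtype` choice and the final decode step are assumptions not shown in the hunk, and the tokenized inputs are moved with `BatchEncoding.to(...)` since tokenizer outputs do not expose a `.cuda()` method.

```python
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load the updated checkpoint referenced by this change.
tokenizer = AutoTokenizer.from_pretrained(
    "deepseek-ai/deepseek-coder-7b-base-v1.5", trust_remote_code=True
)
model = AutoModelForCausalLM.from_pretrained(
    "deepseek-ai/deepseek-coder-7b-base-v1.5",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # assumed dtype; not part of the diff
).cuda()

input_text = "#write a quick sort algorithm"
# BatchEncoding provides .to(device) rather than .cuda(), so move the
# tokenized inputs to the same device as the model before generating.
inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_length=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```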