Update README.md
Browse files
README.md
CHANGED
@@ -12,7 +12,7 @@ tags:
|
|
12 |
- portuguese
|
13 |
---
|
14 |
|
15 |
-
#
|
16 |
|
17 |
<p align="center">
|
18 |
<img src="https://raw.githubusercontent.com/rhaymisonbetini/huggphotos/main/luana-hel.jpeg" alt="Bode Logo" width="50%" style="margin-left:'auto' margin-right:'auto' display:'block'"/>
|
@@ -36,8 +36,8 @@ Important points like these help models (even smaller models like 7b) to perform
|
|
36 |
!pip install -q -U bitsandbytes
|
37 |
|
38 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer
|
39 |
-
model = AutoModelForCausalLM.from_pretrained("rhaymison/Mistral-luana-7b-mental-health", device_map= {"": 0})
|
40 |
-
tokenizer = AutoTokenizer.from_pretrained("rhaymison/Mistral-luana-7b-mental-health")
|
41 |
model.eval()
|
42 |
|
43 |
```
|
|
|
12 |
- portuguese
|
13 |
---
|
14 |
|
15 |
+
# Mistral-portuguese-luana-7b-mental-health
|
16 |
|
17 |
<p align="center">
|
18 |
<img src="https://raw.githubusercontent.com/rhaymisonbetini/huggphotos/main/luana-hel.jpeg" alt="Bode Logo" width="50%" style="margin-left:'auto' margin-right:'auto' display:'block'"/>
|
|
|
36 |
!pip install -q -U bitsandbytes
|
37 |
|
38 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer
|
39 |
+
model = AutoModelForCausalLM.from_pretrained("rhaymison/Mistral-portuguese-luana-7b-mental-health", device_map= {"": 0})
|
40 |
+
tokenizer = AutoTokenizer.from_pretrained("rhaymison/Mistral-portuguese-luana-7b-mental-health")
|
41 |
model.eval()
|
42 |
|
43 |
```
|