Update README.md
README.md CHANGED
@@ -22,9 +22,9 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 
 from accelerate import infer_auto_device_map, dispatch_model
 from accelerate.utils import get_balanced_memory
 
-tokenizer = AutoTokenizer.from_pretrained("TigerResearch/tigerbot-7b-sft")
+tokenizer = AutoTokenizer.from_pretrained("TigerResearch/tigerbot-7b-sft-v1")
 
-model = AutoModelForCausalLM.from_pretrained("TigerResearch/tigerbot-7b-sft")
+model = AutoModelForCausalLM.from_pretrained("TigerResearch/tigerbot-7b-sft-v1")
 
 max_memory = get_balanced_memory(model)
 device_map = infer_auto_device_map(model, max_memory=max_memory, no_split_module_classes=["BloomBlock"])
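For context, a minimal, self-contained sketch of how this snippet is typically completed. Only the imports, the two `from_pretrained` calls, and the memory/device-map lines appear in the hunk above; the `dispatch_model` call, the prompt, and the generation step are illustrative assumptions, not part of the shown change.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from accelerate import infer_auto_device_map, dispatch_model
from accelerate.utils import get_balanced_memory

tokenizer = AutoTokenizer.from_pretrained("TigerResearch/tigerbot-7b-sft-v1")
model = AutoModelForCausalLM.from_pretrained("TigerResearch/tigerbot-7b-sft-v1")

# Balance the model's weights across the available GPUs. BloomBlock is listed in
# no_split_module_classes so a single transformer block is never split across devices.
max_memory = get_balanced_memory(model)
device_map = infer_auto_device_map(model, max_memory=max_memory, no_split_module_classes=["BloomBlock"])

# Assumption: place the shards according to the computed device map
# (dispatch_model is imported in the hunk above, but its call is not shown there).
model = dispatch_model(model, device_map=device_map)

# Hypothetical prompt, only to demonstrate usage.
prompt = "Instruction: introduce TigerBot.\n\nResponse:"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```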