Fix usage example
README.md
CHANGED
@@ -61,7 +61,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_path)
 
 device = 'cuda'
 dtype = torch.bfloat16
-model =
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=dtype, device_map=device)
 
 # Prepare the input text
 prompt = 'Complete the paragraph: our solar system is'
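For context, here is a minimal sketch of how the corrected line fits into the full usage example. It assumes the standard `transformers` generation API and that the README defines `model_path` above the shown hunk; the `model_path` value and generation settings below are placeholders, not part of the original diff.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/or/repo-id"  # placeholder; the README sets this earlier

tokenizer = AutoTokenizer.from_pretrained(model_path)

device = 'cuda'
dtype = torch.bfloat16
# The fixed line: load the model in bfloat16 and place it on the GPU
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=dtype, device_map=device)

# Prepare the input text
prompt = 'Complete the paragraph: our solar system is'
inputs = tokenizer(prompt, return_tensors='pt').to(device)

# Generate a completion (max_new_tokens is an illustrative default)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```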