SalehAhmad committed • Commit d2351f2 • 1 Parent(s): cf45fa0
Update README.md
README.md CHANGED
@@ -21,7 +21,7 @@ This model is for the module
 # Use a huggingface pipeline as a high-level helper
 from transformers import pipeline
 import torch
-
+pipe = pipeline("text-generation",
 model="SalehAhmad/Initial_Knowledge_Assessment_Test-Model-LLAMA7B_3Epochs",
 device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'),
 torch_dtype=torch.bfloat16,
@@ -56,8 +56,9 @@ and so on.
 Paragraph: {paragraph}
 [/INST]</s>Response:'''

-output =
+output = pipe(prompt,
 num_return_sequences=1,
 return_full_text=False)
+
 print(output[0]['generated_text'])
 ```
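For reference, the two hunks above piece together into the following minimal sketch. It is not the full README snippet: only the tail of the prompt template is visible in the diff, so the `<s>[INST]` wrapper, the instruction-free body, and the example `paragraph` value below are placeholders, and the real README may pass additional arguments to `pipeline`.

```python
# Minimal sketch assembled from the updated README snippet (not the complete README code).
from transformers import pipeline
import torch

# Use a huggingface pipeline as a high-level helper
pipe = pipeline("text-generation",
                model="SalehAhmad/Initial_Knowledge_Assessment_Test-Model-LLAMA7B_3Epochs",
                device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'),
                torch_dtype=torch.bfloat16)

# Placeholder prompt: only "Paragraph: {paragraph}" and the closing
# "[/INST]</s>Response:" appear in the diff; the opening wrapper is an assumption.
paragraph = "Your input paragraph goes here."
prompt = f'''<s>[INST]
Paragraph: {paragraph}
[/INST]</s>Response:'''

# return_full_text=False returns only the newly generated continuation,
# not the prompt echoed back.
output = pipe(prompt,
              num_return_sequences=1,
              return_full_text=False)

print(output[0]['generated_text'])
```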