Update scripts/sentence_transformers_demo.py
scripts/sentence_transformers_demo.py
CHANGED
@@ -2,7 +2,7 @@ from sentence_transformers import CrossEncoder
 from transformers import LlamaTokenizer
 import torch
 
-model_name = "OpenBMB/
+model_name = "OpenBMB/MiniCPM-Reranker-Light"
 model = CrossEncoder(model_name,max_length=1024,trust_remote_code=True, automodel_args={"torch_dtype": torch.float16})
 # You can also use the following code to use flash_attention_2
 #model = CrossEncoder(model_name,max_length=1024,trust_remote_code=True, automodel_args={"attn_implementation":"flash_attention_2","torch_dtype": torch.float16})
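For reference, a minimal usage sketch of the reranker loaded above, using the standard sentence_transformers CrossEncoder.predict API on (query, passage) pairs. The query and passages are invented for illustration, and any model-specific prompt formatting recommended on the MiniCPM-Reranker-Light model card is not applied here.

# Minimal sketch; assumes `model` was created as in the diff above.
# Query and passages are illustrative only.
query = "What is the capital of France?"
passages = [
    "Paris is the capital and most populous city of France.",
    "Berlin is the capital of Germany.",
]

# CrossEncoder.predict scores each (query, passage) pair; higher = more relevant.
scores = model.predict([(query, p) for p in passages])
for passage, score in sorted(zip(passages, scores), key=lambda x: x[1], reverse=True):
    print(f"{score:.4f}  {passage}")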