from transformers import AutoTokenizer, AutoModel, pipeline
#
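# The commented-out block below is a chat demo for a local checkpoint in the
# current directory (".\\"): load the tokenizer and model with
# trust_remote_code=True, put the model in eval mode, and call model.chat()
# with the prompt "你好" ("Hello"). The .float() variant casts the weights to
# full precision, which a CPU-only run typically needs.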
# tokenizer = AutoTokenizer.from_pretrained(".\\", trust_remote_code=True)
# # model = AutoModel.from_pretrained(".\\", trust_remote_code=True).float()
# model = AutoModel.from_pretrained(".\\", trust_remote_code=True)
# model = model.eval()
# response, history = model.chat(tokenizer, "你好", history=[])
# print("response:", response)
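# A minimal pipeline sketch: "text-generation" is only an assumed placeholder
# task, since no task is specified here; swap in the task and model you need.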
nlp = pipeline("text-generation")  # assumed placeholder task