Using llama_cpp
#12
by axcelkuhn
How can I make this work with this model?
```python
from llama_cpp import Llama

model = Llama(
    model_path=model_name,
    n_ctx=2000,
    n_parts=1,
)
```
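Note that newer llama-cpp-python releases appear to have dropped the `n_parts` argument, so a version-safe load (with `model_name` again standing in for the local model path) would be just:

```python
from llama_cpp import Llama

# Omit n_parts on newer llama-cpp-python versions; the library reads the
# model layout from the model file itself.
model = Llama(model_path=model_name, n_ctx=2000)
```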
```python
def get_message_tokens(model, role, content):
    # Tokenize the message body (tokenize() prepends the BOS token),
    # then splice the role token and a linebreak in right after BOS,
    # and close the message with the end-of-sequence token.
    message_tokens = model.tokenize(content.encode("utf-8"))
    message_tokens.insert(1, ROLE_TOKENS[role])
    message_tokens.insert(2, LINEBREAK_TOKEN)
    message_tokens.append(model.token_eos())
    return message_tokens

def get_system_tokens(model):
    # The system prompt is encoded the same way as any other message.
    system_message = {"role": "system", "content": SYSTEM_PROMPT}
    return get_message_tokens(model, **system_message)
```
Do I need to change `system_message = {"role": "system", "content": SYSTEM_PROMPT}` here?
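For context, the snippet assumes `SYSTEM_PROMPT`, `ROLE_TOKENS`, and `LINEBREAK_TOKEN` are defined elsewhere. Below is a minimal sketch of how they are usually wired together, assuming Saiga-style role token IDs; the exact IDs, the prompt text, and the sampling parameters are assumptions that must be checked against the tokenizer of the model actually being loaded:

```python
# Assumed definitions: these token IDs follow the Saiga convention and
# must be verified against the tokenizer of the model you load.
SYSTEM_PROMPT = "You are a helpful assistant."  # placeholder prompt text
LINEBREAK_TOKEN = 13                            # assumed ID of the "\n" token
ROLE_TOKENS = {
    "system": 1788,  # assumed ID of the system role token
    "user": 1404,    # assumed ID of the user role token
    "bot": 9225,     # assumed ID of the bot role token
}

# Prompt = system message, user message, then an opening for the bot's reply.
tokens = get_system_tokens(model)
tokens += get_message_tokens(model, role="user", content="Hello!")
tokens += [model.token_bos(), ROLE_TOKENS["bot"], LINEBREAK_TOKEN]

# Stream completion tokens until the model emits end-of-sequence.
for token in model.generate(tokens, top_k=30, top_p=0.9, temp=0.2):
    if token == model.token_eos():
        break
    print(model.detokenize([token]).decode("utf-8", errors="ignore"),
          end="", flush=True)
```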