from llama_cpp import Llama
from yaml import load, Loader
from prompting import add_prompt
# Load run-time configuration once at import time.
# NOTE(review): yaml.load with the full Loader can construct arbitrary Python
# objects; params.yml is assumed to be trusted local config — use SafeLoader
# if that ever changes.
with open('params.yml', encoding='utf-8') as _params_file:
    params = load(_params_file, Loader=Loader)
def get_score(
    paragraph_1: str,
    paragraph_2: str,
    instruction: str = params['instruction'],
    model_path: str = params['model_file_path'],
    context_size: int = params['context_size'],
    max_tokens: int = params['max_tokens'],
    temperature: float = params['temperature'],
    top_p: float = params['top_p'],
    echo: bool = params['echo'],
):
    """Run two paragraphs through a local llama-cpp model and return the completion.

    Builds a prompt from the two paragraphs and the instruction via
    ``add_prompt``, loads the model, and returns the raw generated text.

    Args:
        paragraph_1: First paragraph inserted into the prompt.
        paragraph_2: Second paragraph inserted into the prompt.
        instruction: Instruction text for the prompt (default from params.yml).
        model_path: Path to the local model file.
        context_size: Context window passed to ``Llama`` as ``n_ctx``.
        max_tokens: Maximum number of tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling threshold.
        echo: Whether the completion includes the prompt text.

    Returns:
        The generated completion text (``choices[0]['text']``), or ``None``
        if any step raised — the error is printed, not re-raised.
    """
    try:
        prompt = add_prompt(paragraph_1, paragraph_2, instruction)
        # NOTE(review): the model is (re)loaded on every call — hoist the
        # Llama(...) construction out of this function if it is called often.
        llm = Llama(model_path=model_path, n_ctx=context_size)
        output = llm(
            prompt,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            echo=echo,
            stop='#',  # cut generation at the first '#' character
        )
        return output["choices"][0]["text"]
    except Exception as e:
        # Best-effort API: report the failure and signal it with an explicit
        # None instead of letting the exception propagate to the caller.
        print(f'An error occurred in the function `get_score`:\n{e}')
        return None