# Spaces: Sleeping — Hugging Face Spaces status banner captured during
# page scrape; not part of the program.
from llama_cpp import Llama
from yaml import load, Loader

from prompting import add_prompt

# Runtime configuration: model file path, context size, sampling parameters,
# and the scoring instruction used to build prompts.
# NOTE(review): yaml.load with the full Loader can construct arbitrary Python
# objects via tags; params.yml is local config here, but yaml.safe_load would
# be the safer default if the file can ever come from an untrusted source.
with open('params.yml') as _params_file:  # context manager: no leaked handle
    params = load(_params_file.read(), Loader=Loader)
def get_score(
    paragraph_1: str,
    paragraph_2: str,
    instruction: str = params['instruction'],
    model_path: str = params['model_file_path'],
    context_size: int = params['context_size'],
    max_tokens: int = params['max_tokens'],
    temperature: float = params['temperature'],
    top_p: float = params['top_p'],
    echo: bool = params['echo'],
):
    """Run a local llama.cpp model on a two-paragraph prompt and return its text.

    Builds a prompt from ``paragraph_1``, ``paragraph_2`` and ``instruction``
    via ``add_prompt``, loads the GGUF model at ``model_path``, and returns the
    raw completion text of the first choice.

    Args:
        paragraph_1: First paragraph inserted into the prompt.
        paragraph_2: Second paragraph inserted into the prompt.
        instruction: Task instruction text (default from params.yml).
        model_path: Path to the llama.cpp model file (default from params.yml).
        context_size: Model context window in tokens (default from params.yml).
        max_tokens: Maximum tokens to generate (default from params.yml).
        temperature: Sampling temperature (default from params.yml).
        top_p: Nucleus-sampling threshold (default from params.yml).
        echo: Whether the completion echoes the prompt (default from params.yml).

    Returns:
        The generated completion text, or ``None`` if any step failed
        (the error is printed, not raised).

    Note:
        The defaults are read from ``params`` once at import time; later edits
        to params.yml do not affect them. The model is also reloaded on every
        call — cache the ``Llama`` instance at module level if this function
        is called repeatedly.
    """
    try:
        prompt = add_prompt(paragraph_1, paragraph_2, instruction)
        llm = Llama(model_path=model_path, n_ctx=context_size)
        output = llm(
            prompt,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            echo=echo,
            stop=['#'],  # halt generation at the first '#' marker
        )
        return output["choices"][0]["text"]
    except Exception as e:
        # Deliberate broad catch: callers treat None as "scoring failed"
        # rather than having model/prompt errors propagate.
        print(f'An error occurred in the function `get_score`:\n{e}')
        return None