# types/llm_params.py

from typing import Optional, List
from pydantic import BaseModel, Field


class LlmPredictParams(BaseModel):
    """
    Parameters for an LLM prediction request.
    """
    system_prompt: Optional[str] = Field(None, description="OpenAI only. The system prompt.")
    user_prompt: Optional[str] = Field(None, description="OpenAI only. Prompt template passed as the user-role message.")
    # Sampling controls (most names match the llama.cpp server API).
    n_predict: Optional[int] = None  # maximum number of tokens to generate
    temperature: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[float] = None
    min_p: Optional[float] = None
    seed: Optional[int] = None
    repeat_penalty: Optional[float] = None
    repeat_last_n: Optional[int] = None  # window of recent tokens the repeat penalty applies to
    # Application-level retry: re-issue the request up to retry_count times
    # if the expected text is absent from the response.
    retry_if_text_not_present: Optional[str] = None
    retry_count: Optional[int] = None
    presence_penalty: Optional[float] = None
    frequency_penalty: Optional[float] = None
    n_keep: Optional[int] = None  # prompt tokens to keep when the context overflows
    cache_prompt: Optional[bool] = None  # reuse the cached prompt between requests
    stop: Optional[List[str]] = None  # stop sequences
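
# A minimal serialization sketch (an assumption, not part of the original
# module), using the pydantic v1 API that the .json() call at the end of
# this file implies: exclude None values so that only the fields a caller
# actually set are sent to the backend.
#
#   sampling = LlmPredictParams(temperature=0.2, top_p=0.9, stop=["END"])
#   sampling.dict(exclude_none=True)
#   # -> {'temperature': 0.2, 'top_p': 0.9, 'stop': ['END']}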


class LlmParams(BaseModel):
    """
    Top-level parameters for an LLM endpoint.
    """
    name: str
    url: str
    type: str  # backend type, e.g. "openai"
    context: int  # context window size, in tokens
    default: Optional[bool] = None
    template: Optional[str] = None
    predict_params: Optional[LlmPredictParams] = None


# Usage example
query = {
    "name": "example-model",
    "url": "http://example.com",
    "type": "openai",
    "context": 1024,
    "default": True,
    "template": "Some template",
    "predict_params": {
        "system_prompt": "Welcome!",
        "temperature": 0.7,
        "retry_count": 3,
        "stop": ["END"],
    },
}

# Validate the data
llm_params = LlmParams(**query)
print(llm_params.json(indent=2))
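
# A hedged error-handling sketch (not in the original file): pydantic raises
# ValidationError when a required field such as "context" is missing.
#
#   from pydantic import ValidationError
#   try:
#       LlmParams(name="example-model", url="http://example.com", type="openai")
#   except ValidationError as err:
#       print(err)  # reports that the "context" field is required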