Update services/model_manager.py
services/model_manager.py
CHANGED  +23 -0
@@ -7,6 +7,18 @@ import logging
 from functools import lru_cache
 from config.config import GenerationConfig, ModelConfig
 
+from langfuse.decorators import observe, langfuse_context
+import os
+
+# Initialize Langfuse
+os.environ["LANGFUSE_PUBLIC_KEY"] = "pk-lf-04d2302a-aa5c-4870-9703-58ab64c3bcae"
+os.environ["LANGFUSE_SECRET_KEY"] = "sk-lf-d34ea200-feec-428e-a621-784fce93a5af"
+os.environ["LANGFUSE_HOST"] = "https://chris4k-langfuse-template-space.hf.space"  # 🇪🇺 EU region
+
+try:
+    langfuse = Langfuse()
+except Exception as e:
+    print("Langfuse Offline")
 
 class ModelManager:
     def __init__(self, device: Optional[str] = None):
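Note: the hunk above calls Langfuse(), but only observe and langfuse_context are imported from langfuse.decorators, so the Langfuse client class is never imported and the try block would fail with a NameError. A minimal sketch of an initialization that would resolve, assuming the Langfuse Python SDK's top-level client and the environment variables set in the hunk:

import os
from langfuse import Langfuse            # client class; not part of langfuse.decorators
from langfuse.decorators import observe  # kept for the @observe() usage in the next hunk

# Assumes LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY / LANGFUSE_HOST are already
# exported as in the hunk above; Langfuse() reads them from the environment.
try:
    langfuse = Langfuse()
except Exception:
    print("Langfuse Offline")
    langfuse = None  # keep the name defined even when the host is unreachable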
@@ -15,6 +27,17 @@
         self.models: Dict[str, Any] = {}
         self.tokenizers: Dict[str, Any] = {}
 
+    def load_model(self, model_name: str):
+        # Code to load your model, e.g., Hugging Face's transformers library
+        from transformers import AutoModelForCausalLM
+        return AutoModelForCausalLM.from_pretrained(model_name)
+
+    @observe()
+    def load_tokenizer(self, model_name: str):
+        # Load the tokenizer associated with the model
+        from transformers import AutoTokenizer
+        return AutoTokenizer.from_pretrained(model_name)
+
     def load_model(self, model_id: str, model_path: str, model_type: str, config: ModelConfig) -> None:
         """Load a model with specified configuration."""
         try:
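Note: the second hunk defines load_model(self, model_name) above the existing load_model(self, model_id, model_path, model_type, config); since Python keeps only the last definition of a name in a class body, the new single-argument helper is shadowed and unreachable. A sketch of the same idea under a distinct, hypothetical method name, with both loaders traced via @observe() (the hunk itself decorates only load_tokenizer):

from transformers import AutoModelForCausalLM, AutoTokenizer
from langfuse.decorators import observe


class ModelManager:
    # Only the new loading helpers are sketched; device/config handling is elided.

    @observe()
    def load_pretrained_model(self, model_name: str):
        # Hypothetical name, chosen so it does not clash with the existing
        # load_model(model_id, model_path, model_type, config) method.
        return AutoModelForCausalLM.from_pretrained(model_name)

    @observe()
    def load_tokenizer(self, model_name: str):
        # Load the tokenizer associated with the model; @observe() records
        # the call as a Langfuse trace.
        return AutoTokenizer.from_pretrained(model_name)


# Usage: ModelManager().load_tokenizer("gpt2")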