Character Database of Bangumis (If you need character LoRAs, see: https://huggingface.co./CyberHarem)
from loadimg import load_img
from huggingface_hub import InferenceClient

# load_img accepts a local path, a URL, a PIL image, or a numpy array
# and here returns the image encoded as a base64 string
my_b64_img = load_img(imgPath_url_pillow_or_numpy, output_type="base64")

client = InferenceClient(api_key="hf_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
messages = [
    {
        "role": "user",
        "content": [
            {
                "type": "text",
                "text": "Describe this image in one sentence."
            },
            {
                "type": "image_url",
                "image_url": {
                    "url": my_b64_img  # base64 allows using images without uploading them to the web
                }
            }
        ]
    }
]
stream = client.chat.completions.create(
    model="meta-llama/Llama-3.2-11B-Vision-Instruct",
    messages=messages,
    max_tokens=500,
    stream=True
)

# print the generated description token by token as it streams in
for chunk in stream:
    print(chunk.choices[0].delta.content, end="")
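If you do not need token-by-token streaming, the same call can be made without stream=True and the full reply read from the response object; a minimal sketch reusing the client and messages defined above:

# non-streaming variant: get the whole description in one response
response = client.chat.completions.create(
    model="meta-llama/Llama-3.2-11B-Vision-Instruct",
    messages=messages,
    max_tokens=500,
)
print(response.choices[0].message.content)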
pip install -qU "huggingface_hub>=0.22"
from huggingface_hub import PyTorchModelHubMixin
from torch import nn


class MyModel(nn.Module, PyTorchModelHubMixin):
    def __init__(self, a, b):
        super().__init__()
        self.layer = nn.Linear(a, b)

    def forward(self, inputs):
        return self.layer(inputs)


# the mixin adds push_to_hub / from_pretrained on top of the plain nn.Module
first_model = MyModel(3, 1)
first_model.push_to_hub("not-lain/test")
pretrained_model = MyModel.from_pretrained("not-lain/test")
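The mixin also serializes the constructor arguments into config.json, so a checkpoint can be reloaded without repeating them. A quick local round trip, assuming a hypothetical ./my-model directory:

# save weights and config locally, then reload them
first_model.save_pretrained("./my-model")          # ./my-model is just an example directory
restored = MyModel.from_pretrained("./my-model")   # a and b are read back from config.json
print(restored.layer)                              # Linear(in_features=3, out_features=1, ...)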
import os

import chromadb
from your_custom_fn import CustomEmbeddingFunction


class ChromaStorage:
    def __init__(self, config):
        self.config = config
        self.check_config()
        self.client = self.init_client()
        self.embedding_function = CustomEmbeddingFunction()

    def check_config(self):
        # fail early if the configured storage path does not exist
        if not os.path.exists(self.config.path):
            raise ValueError("Provided path does not exist!")

    def init_client(self):
        # PersistentClient keeps the collections on disk at config.path
        return chromadb.PersistentClient(path=self.config.path)

    def init_collection(self, name: str):
        return self.client.get_or_create_collection(name=name, embedding_function=self.embedding_function)
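One possible way to use the class; the SimpleNamespace config, the ./chroma_db path, and the document strings are stand-ins, while add and query are chromadb's collection API:

from types import SimpleNamespace

# assumes the ./chroma_db directory already exists (check_config requires it)
storage = ChromaStorage(SimpleNamespace(path="./chroma_db"))
collection = storage.init_collection("my_documents")

# documents are embedded with CustomEmbeddingFunction before being stored
collection.add(ids=["doc-1", "doc-2"], documents=["first text", "second text"])
results = collection.query(query_texts=["first"], n_results=1)
print(results["ids"])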
git clone https://github.com/OpenAccess-AI-Collective/axolotl
cd axolotl
pip3 install packaging
pip3 install -e '.[flash-attn,deepspeed]'
Then open examples/llama-2/qlora.yml and replace the default base model
base_model: NousResearch/Llama-2-7b-hf
with a smaller one such as
base_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
and launch training:
accelerate launch -m axolotl.cli.train examples/llama-2/qlora.yml
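Once training finishes, axolotl can load the resulting adapter for a quick interactive test; a sketch assuming the adapter was written to ./qlora-out, the output_dir set in that example config:

accelerate launch -m axolotl.cli.inference examples/llama-2/qlora.yml --lora_model_dir="./qlora-out"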