import gradio as gr
from collections.abc import Generator
from openai import OpenAI
# from gradio.chat_interface import ChatInterface
from chat_interface import ChatInterface
USERNAME = "ahmedheakl"
SPACE_NAME = "AIN-Arabic-VLM"
TITLE = "Welcome to AIN Chatbot"
DESCRIPTION = "Welcome to the AIN Arabic VLM chatbot, the most comprehensive Arabic-English LMM, developed by MBZUAI."
TOP_N_HISTORY = 2
LOGO_PATH = "https://huggingface.co./spaces/ahmedheakl/AIN-Arabic-VLM/resolve/main/logo.jpeg"
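# This Space wires a multimodal Gradio ChatInterface to an OpenAI-compatible
# chat-completions endpoint (here reached through an ngrok tunnel). Uploaded
# images are not sent inline; they are referenced by this Space's public
# /gradio_api/file= URL so the backend can fetch them over HTTP.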
def get_gradio_url(path: str) -> str:
    # If path is already an absolute URL, return it as is.
    if path.startswith("http"):
        return path
    return f"https://{USERNAME}-{SPACE_NAME}.hf.space/gradio_api/file={path}"
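# Example (hypothetical local path):
#   get_gradio_url("/tmp/gradio/abc/cat.png")
#   -> "https://ahmedheakl-AIN-Arabic-VLM.hf.space/gradio_api/file=/tmp/gradio/abc/cat.png"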
def history_to_messages(history: list) -> list:
    """Convert Gradio "messages"-style history into OpenAI chat messages,
    merging consecutive entries from the same role into one multi-part message."""
    messages = []
    merge = False
    for i, h in enumerate(history):
        content = h.get("content", [])
        role = h.get("role", "")
        ct = []
        if isinstance(content, tuple):
            # File entries arrive as a (path,) tuple; forward them as an image URL.
            src_path = content[0]
            ct = [{"type": "image_url", "image_url": {"url": get_gradio_url(src_path)}}]
        else:
            ct = [{"type": "text", "text": content}]
        if merge:
            messages[-1]["content"].extend(ct)
            merge = False
        else:
            messages.append({"role": role, "content": ct})
        # Merge the next entry into this one if it comes from the same role.
        if i < len(history) - 1 and role == history[i + 1].get("role", ""):
            merge = True
    return messages
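# Example of the merge above (hypothetical history): an image upload followed by a
# text prompt from the same user turn,
#   [{"role": "user", "content": ("cat.png",)}, {"role": "user", "content": "What is this?"}]
# collapses into one multi-part OpenAI message:
#   [{"role": "user", "content": [{"type": "image_url", ...}, {"type": "text", "text": "What is this?"}]}]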
def load_chat(
    base_url: str,
    model: str,
    token: str | None = None,
    *,
    system_message: str | None = None,
    **kwargs,
) -> gr.ChatInterface:
    client = OpenAI(api_key=token, base_url=base_url)
    start_message = (
        [{"role": "system", "content": system_message}] if system_message else []
    )

    def open_api_stream(
        message: dict, history: list | None
    ) -> Generator[str, None, None]:
        history = history or start_message
        print(history)  # debug: inspect the incoming history
        # Normalise the history into OpenAI messages and keep only the last turns.
        if len(history) > 0 and isinstance(history[0], (list, tuple)):
            history = history[-TOP_N_HISTORY:]
            history = ChatInterface._tuples_to_messages(history)
        elif len(history) > 0 and isinstance(history[0], dict):
            history = history_to_messages(history)
            history = history[-TOP_N_HISTORY:]
        files = message.get("files", [])
        text = message.get("text", "Describe the photo in detail.")
        if not text.strip():
            text = "Describe the photo in detail."
        content = [
            {"type": "text", "text": text}
        ]
        if files:
            # Attach the first uploaded file as an image URL the backend can fetch.
            src_path = files[0]
            content.append({"type": "image_url", "image_url": {"url": get_gradio_url(src_path)}})
        messages = history + [{"role": "user", "content": content}]
        stream = client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True,
        )
        # Stream the completion back to the UI, yielding the accumulated text so far.
        response = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                response += chunk.choices[0].delta.content
                yield response

    return gr.ChatInterface(
        open_api_stream, type="messages", **kwargs,
    )
load_chat(
    "https://f362-5-195-0-150.ngrok-free.app/v1",
    model="test",
    token="ollama",
    multimodal=True,
    title=TITLE,
    description=DESCRIPTION,
    theme="ocean",
    # examples=[
    #     {
    #         # "Tell me the name of the building in the picture and what is interesting about it."
    #         "text": "أخبرني ما اسم المبنى الموجود في الصورة والشيء المثير للاهتمام فيه",
    #         "files": ["https://cdn.mos.cms.futurecdn.net/5HrnHp9ybAqYrtruKAsfkN-1200-80.jpg"],
    #     },
    #     {
    #         # "What is the flag in the picture?"
    #         "text": "ما هو العلم الموجود في الصورة؟",
    #         "files": ["https://mtc.ae/wp-content/uploads/2023/09/Satin-UAE-Flag-UAE-F-B-Blank.jpg"],
    #     },
    #     {
    #         "text": "How many people are there in the image?",
    #         "files": ["https://i0.wp.com/eatpitapita.com/wp-content/uploads/2020/02/Arab-Muslim-or-Middle-Eastern-Preview.jpg"]
    #     },
    # ],
    # cache_examples=False
).queue().launch(allowed_paths=["/static"])
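# The base_url above is an OpenAI-compatible /v1 endpoint exposed through an ngrok
# tunnel; the "ollama" token suggests an Ollama-style server. A minimal local sketch,
# assuming Ollama's OpenAI-compatible API on its default port and that a model named
# "test" is available there:
#
#   load_chat("http://localhost:11434/v1", model="test", token="ollama", multimodal=True)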