# myezrag / app.py
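"""MyEzRAG: a Gradio app that converts CSV/text data into Parquet datasets and chats with an OpenAI LLM grounded in the uploaded data."""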
import gradio as gr
from huggingface_hub import InferenceClient
import os
import io
import re
import csv
import traceback
import pandas as pd
from typing import List, Dict, Tuple
from openai import OpenAI
# CSS settings
css = """
footer {
visibility: hidden;
}
#chatbot-container, #chatbot-data-upload {
height: 700px;
overflow-y: scroll;
}
#chatbot-container .message, #chatbot-data-upload .message {
font-size: 14px;
}
/* Input field background and text colors */
textarea, input[type="text"] {
background-color: #ffffff;
color: #000000;
}
/* ํŒŒ์ผ ์—…๋กœ๋“œ ์˜์—ญ ๋†’์ด ์กฐ์ ˆ */
#parquet-upload-area {
max-height: 150px;
overflow-y: auto;
}
/* Font size of the initial description */
#initial-description {
font-size: 14px;
}
/* API Key input section styles */
.api-key-section {
margin: 10px 0;
padding: 10px;
border: 1px solid #ddd;
border-radius: 5px;
}
.api-key-status {
margin-top: 5px;
font-weight: bold;
}
"""
# Hugging Face Inference API client (not used by the chat path below, which calls the OpenAI API)
hf_client = InferenceClient(
"CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN")
)
def load_code(filename: str) -> str:
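    """Read a UTF-8 text file and return its contents, or an error message string on failure."""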
try:
with open(filename, 'r', encoding='utf-8') as file:
return file.read()
except FileNotFoundError:
return f"{filename} ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
except Exception as e:
return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
def load_parquet(filename: str) -> str:
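    """Load a Parquet file and return its first 10 rows as a Markdown table."""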
try:
df = pd.read_parquet(filename, engine='pyarrow')
return df.head(10).to_markdown(index=False)
except FileNotFoundError:
return f"{filename} ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
except Exception as e:
return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
def clean_response(text: str) -> str:
"""์‘๋‹ต ํ…์ŠคํŠธ ์ •์ œ ํ•จ์ˆ˜"""
sentences = [s.strip() for s in text.split('.') if s.strip()]
unique_sentences = []
seen = set()
for sentence in sentences:
normalized = ' '.join(sentence.lower().split())
if normalized not in seen:
seen.add(normalized)
unique_sentences.append(sentence)
cleaned_text = '. '.join(unique_sentences)
if cleaned_text and not cleaned_text.endswith('.'):
cleaned_text += '.'
return cleaned_text
def remove_duplicates(text: str) -> str:
"""์ค‘๋ณต ๋ฌธ์žฅ ์ œ๊ฑฐ ํ•จ์ˆ˜"""
sentences = text.split('.')
unique_sentences = []
seen = set()
for sentence in sentences:
sentence = sentence.strip()
if sentence and sentence not in seen:
seen.add(sentence)
unique_sentences.append(sentence)
return '. '.join(unique_sentences)
def upload_csv(file_path: str) -> Tuple[str, str]:
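    """Validate an uploaded CSV (requires id, text, label, metadata columns), deduplicate it, and write a Parquet copy.

    Returns (status message, parquet filename); the filename is an empty string on failure.
    """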
try:
df = pd.read_csv(file_path, sep=',')
required_columns = {'id', 'text', 'label', 'metadata'}
available_columns = set(df.columns)
missing_columns = required_columns - available_columns
if missing_columns:
return f"CSV ํŒŒ์ผ์— ๋‹ค์Œ ํ•„์ˆ˜ ์ปฌ๋Ÿผ์ด ๋ˆ„๋ฝ๋˜์—ˆ์Šต๋‹ˆ๋‹ค: {', '.join(missing_columns)}", ""
df.drop_duplicates(inplace=True)
df.fillna('', inplace=True)
df = df.astype({'id': 'int32', 'text': 'string', 'label': 'category', 'metadata': 'string'})
parquet_filename = os.path.splitext(os.path.basename(file_path))[0] + '.parquet'
df.to_parquet(parquet_filename, engine='pyarrow', compression='snappy')
return f"{parquet_filename} ํŒŒ์ผ์ด ์„ฑ๊ณต์ ์œผ๋กœ ์—…๋กœ๋“œ๋˜๊ณ  ๋ณ€ํ™˜๋˜์—ˆ์Šต๋‹ˆ๋‹ค.", parquet_filename
except Exception as e:
return f"CSV ํŒŒ์ผ ์—…๋กœ๋“œ ๋ฐ ๋ณ€ํ™˜ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}", ""
def upload_parquet(file_path: str) -> Tuple[str, str, str]:
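    """Load a Parquet file and build a Markdown summary: record count, columns, per-column stats, and a 10-row preview.

    Returns (status message, markdown summary, JSON records); the last two are empty strings on failure.
    """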
try:
df = pd.read_parquet(file_path, engine='pyarrow')
data_info = {
"์ด ๋ ˆ์ฝ”๋“œ ์ˆ˜": len(df),
"์ปฌ๋Ÿผ ๋ชฉ๋ก": list(df.columns),
"๋ฐ์ดํ„ฐ ํƒ€์ž…": df.dtypes.to_dict(),
"๊ฒฐ์ธก์น˜ ์ •๋ณด": df.isnull().sum().to_dict()
}
summary = []
summary.append(f"### ๋ฐ์ดํ„ฐ์…‹ ๊ธฐ๋ณธ ์ •๋ณด:")
summary.append(f"- ์ด ๋ ˆ์ฝ”๋“œ ์ˆ˜: {data_info['์ด ๋ ˆ์ฝ”๋“œ ์ˆ˜']}")
summary.append(f"- ์ปฌ๋Ÿผ ๋ชฉ๋ก: {', '.join(data_info['์ปฌ๋Ÿผ ๋ชฉ๋ก'])}")
summary.append("\n### ์ปฌ๋Ÿผ๋ณ„ ์ •๋ณด:")
for col in df.columns:
if df[col].dtype in ['int64', 'float64']:
stats = df[col].describe()
summary.append(f"\n{col} (์ˆ˜์น˜ํ˜•):")
summary.append(f"- ํ‰๊ท : {stats['mean']:.2f}")
summary.append(f"- ์ตœ์†Œ: {stats['min']}")
summary.append(f"- ์ตœ๋Œ€: {stats['max']}")
elif df[col].dtype == 'object' or df[col].dtype == 'string':
unique_count = df[col].nunique()
summary.append(f"\n{col} (ํ…์ŠคํŠธ):")
summary.append(f"- ๊ณ ์œ ๊ฐ’ ์ˆ˜: {unique_count}")
if unique_count < 10:
value_counts = df[col].value_counts().head(5)
summary.append("- ์ƒ์œ„ 5๊ฐœ ๊ฐ’:")
for val, count in value_counts.items():
summary.append(f" โ€ข {val}: {count}๊ฐœ")
preview = df.head(10).to_markdown(index=False)
summary.append("\n### ๋ฐ์ดํ„ฐ ๋ฏธ๋ฆฌ๋ณด๊ธฐ:")
summary.append(preview)
parquet_content = "\n".join(summary)
parquet_json = df.to_json(orient='records', force_ascii=False)
return "Parquet ํŒŒ์ผ์ด ์„ฑ๊ณต์ ์œผ๋กœ ์—…๋กœ๋“œ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.", parquet_content, parquet_json
except Exception as e:
return f"Parquet ํŒŒ์ผ ์—…๋กœ๋“œ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}", "", ""
def text_to_parquet(text: str) -> Tuple[str, str, str]:
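    """Parse `id,text,label,metadata` lines into a DataFrame and save it as text_to_parquet.parquet.

    Returns (status message, markdown preview, parquet filename).
    """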
try:
lines = [line.strip() for line in text.split('\n') if line.strip()]
data = []
for line in lines:
try:
pattern = r'(\d+),([^,]+),([^,]+),(.+)'
match = re.match(pattern, line)
if match:
id_val, text_val, label_val, metadata_val = match.groups()
text_val = text_val.strip().strip('"')
label_val = label_val.strip().strip('"')
metadata_val = metadata_val.strip().strip('"')
data.append({
'id': int(id_val),
'text': text_val,
'label': label_val,
'metadata': metadata_val
})
except Exception as e:
print(f"๋ผ์ธ ํŒŒ์‹ฑ ์˜ค๋ฅ˜: {line}\n{str(e)}")
continue
if not data:
return "๋ณ€ํ™˜ํ•  ๋ฐ์ดํ„ฐ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", "", ""
df = pd.DataFrame(data)
df = df.astype({
'id': 'int32',
'text': 'string',
'label': 'string',
'metadata': 'string'
})
parquet_filename = 'text_to_parquet.parquet'
df.to_parquet(parquet_filename, engine='pyarrow', compression='snappy')
preview = df.to_markdown(index=False)
return (
f"{parquet_filename} ํŒŒ์ผ์ด ์„ฑ๊ณต์ ์œผ๋กœ ๋ณ€ํ™˜๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์ด {len(df)}๊ฐœ์˜ ๋ ˆ์ฝ”๋“œ๊ฐ€ ์ฒ˜๋ฆฌ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.",
preview,
parquet_filename
)
except Exception as e:
error_message = f"ํ…์ŠคํŠธ ๋ณ€ํ™˜ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
print(f"{error_message}\n{traceback.format_exc()}")
return error_message, "", ""
def respond(message: str, history: List[Dict[str, str]], system_message: str = "", max_tokens: int = 4000, temperature: float = 0.5, top_p: float = 0.9, parquet_data: str = None, api_key: str = None) -> str:
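    """Stream a chat completion from the OpenAI API, grounding the system prompt in the uploaded dataset.

    `parquet_data` is the dataset serialized as JSON records; yields a progressively longer, cleaned response.
    """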
if not api_key:
yield "โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ์„œ๋น„์Šค ์ด์šฉ์„ ์œ„ํ•ด API Key๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
return
    # Initialize the OpenAI client with the user-supplied key
client = OpenAI(api_key=api_key)
system_prefix = """๋ฐ˜๋“œ์‹œ ํ•œ๊ธ€๋กœ ๋‹ต๋ณ€ํ•  ๊ฒƒ. ๋„ˆ๋Š” ์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•˜๋Š” ์—ญํ• ์„ ํ•œ๋‹ค.
์ฃผ์š” ์ง€์นจ:
1. ์งˆ๋ฌธ๊ณผ ์ง์ ‘ ๊ด€๋ จ๋œ ๋‚ด์šฉ๋งŒ ๊ฐ„๋‹จ๋ช…๋ฃŒํ•˜๊ฒŒ ๋‹ต๋ณ€ํ•  ๊ฒƒ
2. ์ด์ „ ๋‹ต๋ณ€๊ณผ ์ค‘๋ณต๋˜๋Š” ๋‚ด์šฉ์€ ์ œ์™ธํ•  ๊ฒƒ
3. ๋ถˆํ•„์š”ํ•œ ์˜ˆ์‹œ๋‚˜ ๋ถ€์—ฐ ์„ค๋ช…์€ ํ•˜์ง€ ๋ง ๊ฒƒ
4. ๋™์ผํ•œ ๋‚ด์šฉ์„ ๋‹ค๋ฅธ ํ‘œํ˜„์œผ๋กœ ๋ฐ˜๋ณตํ•˜์ง€ ๋ง ๊ฒƒ
5. ํ•ต์‹ฌ ์ •๋ณด๋งŒ ์ „๋‹ฌํ•  ๊ฒƒ
"""
if parquet_data:
try:
df = pd.read_json(io.StringIO(parquet_data))
data_summary = df.describe(include='all').to_string()
system_prefix += f"\n\n๋ฐ์ดํ„ฐ ์š”์•ฝ:\n{data_summary}"
except Exception as e:
print(f"๋ฐ์ดํ„ฐ ๋กœ๋“œ ์˜ค๋ฅ˜: {str(e)}")
messages = [{"role": "system", "content": system_prefix}]
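    # Forward only the last three turns of history to keep the prompt short.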
recent_history = history[-3:] if history else []
for chat in recent_history:
messages.append({"role": chat["role"], "content": chat["content"]})
messages.append({"role": "user", "content": message})
try:
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=messages,
max_tokens=max_tokens,
temperature=temperature,
top_p=top_p,
stream=True
)
full_response = ""
for chunk in response:
if chunk.choices[0].delta.content:
full_response += chunk.choices[0].delta.content
yield clean_response(full_response)
except Exception as e:
error_message = f"์‘๋‹ต ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
print(f"{error_message}\n{traceback.format_exc()}")
yield error_message
def preprocess_text_with_llm(input_text: str, api_key: str = None) -> str:
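    """Ask the LLM to restructure free text into `id,text,label,metadata` CSV rows and return the validated result."""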
if not api_key:
return "โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ์„œ๋น„์Šค ์ด์šฉ์„ ์œ„ํ•ด API Key๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
    # Initialize the OpenAI client with the user-supplied key
client = OpenAI(api_key=api_key)
system_prompt = """๋ฐ˜๋“œ์‹œ ํ•œ๊ธ€(ํ•œ๊ตญ์–ด)๋กœ ๋‹ต๋ณ€ํ•˜์‹œ์˜ค. ๋‹น์‹ ์€ ๋ฐ์ดํ„ฐ ์ „์ฒ˜๋ฆฌ ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค. ์ž…๋ ฅ๋œ ํ…์ŠคํŠธ๋ฅผ CSV ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜ํ•˜์„ธ์š”.
๊ทœ์น™:
1. ์ถœ๋ ฅ ํ˜•์‹: id,text,label,metadata
2. id: 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋Š” ์ˆœ์ฐจ์  ๋ฒˆํ˜ธ
3. text: ์˜๋ฏธ ์žˆ๋Š” ๋‹จ์œ„๋กœ ๋ถ„๋ฆฌ๋œ ํ…์ŠคํŠธ
4. label: ํ…์ŠคํŠธ์˜ ์ฃผ์ œ๋‚˜ ์นดํ…Œ๊ณ ๋ฆฌ๋ฅผ ์•„๋ž˜ ๊ธฐ์ค€์œผ๋กœ ์ •ํ™•ํ•˜๊ฒŒ ํ•œ ๊ฐœ๋งŒ ์„ ํƒ
- Historical_Figure (์—ญ์‚ฌ์  ์ธ๋ฌผ)
- Military_History (๊ตฐ์‚ฌ ์—ญ์‚ฌ)
- Technology (๊ธฐ์ˆ )
- Politics (์ •์น˜)
- Culture (๋ฌธํ™”)
5. metadata: ๋‚ ์งœ, ์ถœ์ฒ˜ ๋“ฑ ์ถ”๊ฐ€ ์ •๋ณด"""
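    # Low temperature (0.1) keeps the generated CSV deterministic; the streamed chunks are re-assembled below.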
try:
response = client.chat.completions.create(
model="gpt-4-0125-preview",
messages=[
{"role": "system", "content": system_prompt},
{"role": "user", "content": input_text}
],
max_tokens=4000,
temperature=0.1,
stream=True
)
full_response = ""
for chunk in response:
if chunk.choices[0].delta.content:
full_response += chunk.choices[0].delta.content
processed_text = clean_response(full_response)
        # Validate that the output parses as CSV; consuming the reader is what surfaces csv.Error.
        try:
            list(csv.reader(io.StringIO(processed_text)))
            return processed_text
        except csv.Error:
            return "LLM์ด ์˜ฌ๋ฐ”๋ฅธ CSV ํ˜•์‹์„ ์ƒ์„ฑํ•˜์ง€ ๋ชปํ–ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”."
except Exception as e:
error_message = f"์ „์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
print(error_message)
return error_message
# Gradio Blocks UI definition
with gr.Blocks(css=css) as demo:
    api_key_state = gr.State("")  # session State that stores the user's OpenAI API key
gr.Markdown("# MyEzRAG: LLM์ด ๋‚˜๋งŒ์˜ ๋ฐ์ดํ„ฐ๋กœ ํ•™์Šตํ•œ ์ฝ˜ํ…์ธ  ์ƒ์„ฑ/๋‹ต๋ณ€", elem_id="initial-description")
    # API key input section
with gr.Row(elem_classes="api-key-section"):
with gr.Column(scale=3):
api_key_input = gr.Textbox(
label="OpenAI API Key",
placeholder="sk-...",
type="password",
show_label=True
)
with gr.Column(scale=1):
api_key_button = gr.Button("API Key ์„ค์ •", variant="primary")
# API ํ‚ค ์ƒํƒœ ํ‘œ์‹œ
api_key_status = gr.Markdown("โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ์„œ๋น„์Šค ์ด์šฉ์„ ์œ„ํ•ด API Key๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", elem_classes="api-key-status")
    # API key setup: validate the key format and store it in the session State
def set_api_key(api_key: str):
if not api_key.strip():
return "โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ์„œ๋น„์Šค ์ด์šฉ์„ ์œ„ํ•ด API Key๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", ""
if not api_key.startswith("sk-"):
return "โŒ ์˜ฌ๋ฐ”๋ฅด์ง€ ์•Š์€ API Key ํ˜•์‹์ž…๋‹ˆ๋‹ค. ๋‹ค์‹œ ํ™•์ธํ•ด์ฃผ์„ธ์š”.", ""
return "โœ… API Key๊ฐ€ ์„ฑ๊ณต์ ์œผ๋กœ ์„ค์ •๋˜์—ˆ์Šต๋‹ˆ๋‹ค.", api_key
    # Wire the API key button to the validation handler
api_key_button.click(
set_api_key,
inputs=[api_key_input],
outputs=[api_key_status, api_key_state]
)
gr.Markdown(
"### '์‚ฌ์šฉ ๋ฐฉ๋ฒ•' ํƒญ์„ ํ†ตํ•ด ์ž์„ธํ•œ ์ด์šฉ ๋ฐฉ๋ฒ•์„ ์ฐธ๊ณ ํ•˜์„ธ์š”.\n"
"### Tip) '์˜ˆ์ œ'๋ฅผ ํ†ตํ•ด ๋‹ค์–‘ํ•œ ํ™œ์šฉ ๋ฐฉ๋ฒ•์„ ์ฒดํ—˜ํ•˜๊ณ  ์‘์šฉํ•ด ๋ณด์„ธ์š”, ๋ฐ์ดํ„ฐ์…‹ ์—…๋กœ๋“œ์‹œ ๋ฏธ๋ฆฌ๋ณด๊ธฐ๋Š” 10๊ฑด๋งŒ ์ถœ๋ ฅ",
elem_id="initial-description"
)
    # First tab: My Dataset + LLM
with gr.Tab("My ๋ฐ์ดํ„ฐ์…‹+LLM"):
gr.Markdown("### LLM๊ณผ ๋Œ€ํ™”ํ•˜๊ธฐ")
chatbot_data_upload = gr.Chatbot(label="์ฑ—๋ด‡", type="messages", elem_id="chatbot-data-upload")
msg_data_upload = gr.Textbox(label="๋ฉ”์‹œ์ง€ ์ž…๋ ฅ", placeholder="์—ฌ๊ธฐ์— ๋ฉ”์‹œ์ง€๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”...")
send_data_upload = gr.Button("์ „์†ก")
with gr.Accordion("์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ ๋ฐ ์˜ต์…˜ ์„ค์ •", open=False):
system_message = gr.Textbox(label="System Message", value="๋„ˆ๋Š” AI ์กฐ์–ธ์ž ์—ญํ• ์ด๋‹ค.")
max_tokens = gr.Slider(minimum=1, maximum=8000, value=1000, label="Max Tokens")
temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")
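        # State holding the uploaded dataset (JSON records) used to ground chat responses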
parquet_data_state = gr.State()
def handle_message_data_upload(message: str, history: List[Dict[str, str]], system_message: str, max_tokens: int, temperature: float, top_p: float, parquet_data: str, api_key: str):
if not api_key:
history = history or []
history.append({"role": "assistant", "content": "โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ์„œ๋น„์Šค ์ด์šฉ์„ ์œ„ํ•ด API Key๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."})
yield history, ""
return
history = history or []
recent_questions = [chat['content'].strip().lower() for chat in history[-3:] if chat['role'] == 'user']
if message.strip().lower() in recent_questions:
yield history + [{"role": "assistant", "content": "๋™์ผํ•œ ์งˆ๋ฌธ์ด ์ตœ๊ทผ์— ์žˆ์—ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค๋ฅธ ์งˆ๋ฌธ์„ ํ•ด์ฃผ์„ธ์š”."}], ""
return
try:
                history.append({"role": "user", "content": message})
                # Pass history without the just-appended user turn; respond() appends the message itself.
                response_gen = respond(
                    message,
                    history[:-1],
system_message,
max_tokens,
                    temperature=0.3,  # note: overrides the Temperature slider with a fixed 0.3
top_p=top_p,
parquet_data=parquet_data,
api_key=api_key
)
partial_response = ""
for partial in response_gen:
partial_response = partial
display_history = history + [{"role": "assistant", "content": partial_response}]
yield display_history, ""
history.append({"role": "assistant", "content": partial_response})
except Exception as e:
response = f"์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
history.append({"role": "assistant", "content": response})
yield history, ""
send_data_upload.click(
handle_message_data_upload,
inputs=[
msg_data_upload,
chatbot_data_upload,
system_message,
max_tokens,
temperature,
top_p,
parquet_data_state,
api_key_state,
],
outputs=[chatbot_data_upload, msg_data_upload],
queue=True
)
        # Example prompts
with gr.Accordion("์˜ˆ์ œ", open=False):
gr.Examples(
examples=[
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹์— ๋Œ€ํ•ด ์š”์•ฝ ์„ค๋ช…ํ•˜๋ผ."],
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ™œ์šฉํ•˜์—ฌ, ๋ณธ ์„œ๋น„์Šค๋ฅผ SEO ์ตœ์ ํ™”ํ•˜์—ฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ(๊ฐœ์š”, ๋ฐฐ๊ฒฝ ๋ฐ ํ•„์š”์„ฑ, ๊ธฐ์กด ์œ ์‚ฌ ์ œํ’ˆ/์„œ๋น„์Šค์™€ ๋น„๊ตํ•˜์—ฌ ํŠน์žฅ์ , ํ™œ์šฉ์ฒ˜, ๊ฐ€์น˜, ๊ธฐ๋Œ€ํšจ๊ณผ, ๊ฒฐ๋ก ์„ ํฌํ•จ)๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ"],
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ™œ์šฉํ•˜์—ฌ, ์‚ฌ์šฉ ๋ฐฉ๋ฒ•๊ณผ ์ฐจ๋ณ„์ , ํŠน์ง•, ๊ฐ•์ ์„ ์ค‘์‹ฌ์œผ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์œ ํŠœ๋ธŒ ์˜์ƒ ์Šคํฌ๋ฆฝํŠธ ํ˜•ํƒœ๋กœ ์ž‘์„ฑํ•˜๋ผ"],
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ™œ์šฉํ•˜์—ฌ, ์ œํ’ˆ ์ƒ์„ธ ํŽ˜์ด์ง€ ํ˜•์‹์˜ ๋‚ด์šฉ์„ 4000 ํ† ํฐ ์ด์ƒ ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ™œ์šฉํ•˜์—ฌ, FAQ 20๊ฑด์„ ์ƒ์„ธํ•˜๊ฒŒ ์ž‘์„ฑํ•˜๋ผ. 4000ํ† ํฐ ์ด์ƒ ์‚ฌ์šฉํ•˜๋ผ."],
["์—…๋กœ๋“œ๋œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ™œ์šฉํ•˜์—ฌ, ํŠนํ—ˆ ์ถœ์›์— ํ™œ์šฉํ•  ๊ธฐ์ˆ  ๋ฐ ๋น„์ฆˆ๋‹ˆ์Šค ๋ชจ๋ธ ์ธก๋ฉด์„ ํฌํ•จํ•˜์—ฌ ํŠนํ—ˆ ์ถœ์›์„œ ๊ตฌ์„ฑ์— ๋งž๊ฒŒ ํ˜์‹ ์ ์ธ ์ฐฝ์˜ ๋ฐœ๋ช… ๋‚ด์šฉ์„ ์ค‘์‹ฌ์œผ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ."],
],
inputs=msg_data_upload,
label="์˜ˆ์ œ ์„ ํƒ",
)
        # Parquet file upload
gr.Markdown("### Parquet ํŒŒ์ผ ์—…๋กœ๋“œ")
with gr.Row():
with gr.Column():
parquet_upload = gr.File(
label="Parquet ํŒŒ์ผ ์—…๋กœ๋“œ", type="filepath", elem_id="parquet-upload-area"
)
parquet_upload_button = gr.Button("์—…๋กœ๋“œ")
parquet_upload_status = gr.Textbox(label="์—…๋กœ๋“œ ์ƒํƒœ", interactive=False)
parquet_preview_chat = gr.Markdown(label="Parquet ํŒŒ์ผ ๋ฏธ๋ฆฌ๋ณด๊ธฐ")
def handle_parquet_upload(file_path: str):
message, parquet_content, parquet_json = upload_parquet(file_path)
if parquet_json:
return message, parquet_content, parquet_json
else:
return message, "", ""
parquet_upload_button.click(
handle_parquet_upload,
inputs=parquet_upload,
outputs=[parquet_upload_status, parquet_preview_chat, parquet_data_state]
)
    # Second tab: CSV to My Dataset
with gr.Tab("CSV to My ๋ฐ์ดํ„ฐ์…‹"):
gr.Markdown("### CSV ํŒŒ์ผ ์—…๋กœ๋“œ ๋ฐ Parquet ๋ณ€ํ™˜")
with gr.Row():
with gr.Column():
csv_file = gr.File(label="CSV ํŒŒ์ผ ์—…๋กœ๋“œ", type="filepath")
upload_button = gr.Button("์—…๋กœ๋“œ ๋ฐ ๋ณ€ํ™˜")
upload_status = gr.Textbox(label="์—…๋กœ๋“œ ์ƒํƒœ", interactive=False)
parquet_preview = gr.Markdown(label="Parquet ํŒŒ์ผ ๋ฏธ๋ฆฌ๋ณด๊ธฐ")
download_button = gr.File(label="Parquet ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ", interactive=False)
def handle_csv_upload(file_path: str):
message, parquet_filename = upload_csv(file_path)
if parquet_filename:
parquet_content = load_parquet(parquet_filename)
return message, parquet_content, parquet_filename
else:
return message, "", None
upload_button.click(
handle_csv_upload,
inputs=csv_file,
outputs=[upload_status, parquet_preview, download_button]
)
    # Third tab: Text to My Dataset
with gr.Tab("Text to My ๋ฐ์ดํ„ฐ์…‹"):
gr.Markdown("### ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด CSV๋กœ ๋ณ€ํ™˜ ํ›„ Parquet์œผ๋กœ ์ž๋™ ์ „ํ™˜๋ฉ๋‹ˆ๋‹ค.")
with gr.Row():
with gr.Column():
text_input = gr.Textbox(
label="ํ…์ŠคํŠธ ์ž…๋ ฅ (๊ฐ ํ–‰์€ `id,text,label,metadata` ํ˜•์‹์œผ๋กœ ์ž…๋ ฅ)",
lines=10,
placeholder='์˜ˆ: 1,"์ด์ˆœ์‹ ","์žฅ๊ตฐ","๊ฑฐ๋ถ์„ "\n2,"์›๊ท ","์žฅ๊ตฐ","๋ชจํ•จ"\n3,"์„ ์กฐ","์™•","์‹œ๊ธฐ"\n4,"๋„์š”ํ† ๋ฏธ ํžˆ๋ฐ์š”์‹œ","์™•","์นจ๋žต"'
)
convert_button = gr.Button("๋ณ€ํ™˜ ๋ฐ ๋‹ค์šด๋กœ๋“œ")
convert_status = gr.Textbox(label="๋ณ€ํ™˜ ์ƒํƒœ", interactive=False)
parquet_preview_convert = gr.Markdown(label="Parquet ํŒŒ์ผ ๋ฏธ๋ฆฌ๋ณด๊ธฐ")
download_parquet_convert = gr.File(label="Parquet ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ", interactive=False)
def handle_text_to_parquet(text: str):
message, parquet_content, parquet_filename = text_to_parquet(text)
if parquet_filename:
return message, parquet_content, parquet_filename
else:
return message, "", None
convert_button.click(
handle_text_to_parquet,
inputs=text_input,
outputs=[convert_status, parquet_preview_convert, download_parquet_convert]
)
    # Fourth tab: Text Preprocessing with LLM
with gr.Tab("Text Preprocessing with LLM"):
gr.Markdown("### ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด LLM์ด ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์— ๋งž๊ฒŒ ์ „์ฒ˜๋ฆฌํ•˜์—ฌ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")
with gr.Row():
with gr.Column():
raw_text_input = gr.Textbox(
label="ํ…์ŠคํŠธ ์ž…๋ ฅ",
lines=15,
placeholder="์—ฌ๊ธฐ์— ์ „์ฒ˜๋ฆฌํ•  ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”..."
)
with gr.Row():
preprocess_button = gr.Button("์ „์ฒ˜๋ฆฌ ์‹คํ–‰", variant="primary")
clear_button = gr.Button("์ดˆ๊ธฐํ™”")
preprocess_status = gr.Textbox(
label="์ „์ฒ˜๋ฆฌ ์ƒํƒœ",
interactive=False,
value="๋Œ€๊ธฐ ์ค‘..."
)
processed_text_output = gr.Textbox(
label="์ „์ฒ˜๋ฆฌ๋œ ๋ฐ์ดํ„ฐ์…‹ ์ถœ๋ ฅ",
lines=15,
interactive=False
)
convert_to_parquet_button = gr.Button("Parquet์œผ๋กœ ๋ณ€ํ™˜")
download_parquet = gr.File(label="๋ณ€ํ™˜๋œ Parquet ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ")
def handle_text_preprocessing(input_text: str, api_key: str):
if not api_key:
yield "โš ๏ธ API Key๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.", ""
return
if not input_text.strip():
yield "์ž…๋ ฅ ํ…์ŠคํŠธ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", ""
return
try:
yield "์ „์ฒ˜๋ฆฌ๋ฅผ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค...", ""
processed_text = preprocess_text_with_llm(input_text, api_key)
if processed_text:
yield "์ „์ฒ˜๋ฆฌ๊ฐ€ ์™„๋ฃŒ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.", processed_text
else:
yield "์ „์ฒ˜๋ฆฌ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", ""
except Exception as e:
yield f"์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}", ""
def clear_inputs():
return "", "๋Œ€๊ธฐ ์ค‘...", ""
def convert_to_parquet_file(processed_text: str):
if not processed_text.strip():
return "๋ณ€ํ™˜ํ•  ํ…์ŠคํŠธ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", None
try:
message, parquet_content, parquet_filename = text_to_parquet(processed_text)
if parquet_filename:
return message, parquet_filename
return message, None
except Exception as e:
return f"Parquet ๋ณ€ํ™˜ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", None
preprocess_button.click(
handle_text_preprocessing,
inputs=[raw_text_input, api_key_state],
outputs=[preprocess_status, processed_text_output],
queue=True
)
clear_button.click(
clear_inputs,
outputs=[raw_text_input, preprocess_status, processed_text_output]
)
convert_to_parquet_button.click(
convert_to_parquet_file,
inputs=[processed_text_output],
outputs=[preprocess_status, download_parquet]
)
with gr.Accordion("์˜ˆ์ œ ํ…์ŠคํŠธ", open=False):
gr.Examples(
examples=[
["์ด์ˆœ์‹ ์€ ์กฐ์„  ์ค‘๊ธฐ์˜ ๋ฌด์‹ ์ด๋‹ค. ๊ทธ๋Š” ์ž„์ง„์™œ๋ž€ ๋‹น์‹œ ํ•ด๊ตฐ์„ ์ด๋Œ์—ˆ๋‹ค. ๊ฑฐ๋ถ์„ ์„ ๋งŒ๋“ค์–ด ์™œ๊ตฐ๊ณผ ์‹ธ์› ๋‹ค."],
["์ธ๊ณต์ง€๋Šฅ์€ ์ปดํ“จํ„ฐ ๊ณผํ•™์˜ ํ•œ ๋ถ„์•ผ์ด๋‹ค. ๊ธฐ๊ณ„ํ•™์Šต์€ ์ธ๊ณต์ง€๋Šฅ์˜ ํ•˜์œ„ ๋ถ„์•ผ์ด๋‹ค. ๋”ฅ๋Ÿฌ๋‹์€ ๊ธฐ๊ณ„ํ•™์Šต์˜ ํ•œ ๋ฐฉ๋ฒ•์ด๋‹ค."]
],
inputs=raw_text_input,
label="์˜ˆ์ œ ์„ ํƒ"
)
    # Usage guide tab
with gr.Tab("๐Ÿ“š ์‚ฌ์šฉ ๋ฐฉ๋ฒ•"):
gr.Markdown("""
# MyEzRAG ์‚ฌ์šฉ ๊ฐ€์ด๋“œ
## ๐Ÿ”‘ API Key ์„ค์ •
1. OpenAI API Key๋ฅผ ์ƒ๋‹จ ์ž…๋ ฅ์ฐฝ์— ์ž…๋ ฅ
2. 'API Key ์„ค์ •' ๋ฒ„ํŠผ ํด๋ฆญ
3. ์„ค์ • ์„ฑ๊ณต ๋ฉ”์‹œ์ง€ ํ™•์ธ
## 1๏ธโƒฃ My ๋ฐ์ดํ„ฐ์…‹+LLM ํƒญ
### ๊ธฐ๋Šฅ
- ์—…๋กœ๋“œ๋œ Parquet ๋ฐ์ดํ„ฐ์…‹์„ ๊ธฐ๋ฐ˜์œผ๋กœ LLM๊ณผ ๋Œ€ํ™”
- ๋ฐ์ดํ„ฐ์…‹์˜ ๋‚ด์šฉ์„ ํ™œ์šฉํ•œ ์ฝ˜ํ…์ธ  ์ƒ์„ฑ
### ์‚ฌ์šฉ ๋ฐฉ๋ฒ•
1. Parquet ํŒŒ์ผ ์—…๋กœ๋“œ ์„น์…˜์—์„œ ๋ฐ์ดํ„ฐ์…‹ ํŒŒ์ผ์„ ์—…๋กœ๋“œ
2. ์ฑ„ํŒ…์ฐฝ์— ์›ํ•˜๋Š” ์งˆ๋ฌธ์ด๋‚˜ ์š”์ฒญ์‚ฌํ•ญ ์ž…๋ ฅ
3. ์˜ˆ์ œ ๋ฒ„ํŠผ์„ ํ™œ์šฉํ•˜์—ฌ ๋‹ค์–‘ํ•œ ํ™œ์šฉ ์‚ฌ๋ก€ ์ฒดํ—˜
### ํŒ
- ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ ์„ค์ •์œผ๋กœ ์‘๋‹ต ์Šคํƒ€์ผ ์กฐ์ • ๊ฐ€๋Šฅ
- ์ƒ์„ธํ•œ ์งˆ๋ฌธ์ผ์ˆ˜๋ก ๋” ์ •ํ™•ํ•œ ๋‹ต๋ณ€ ์ œ๊ณต
---
## 2๏ธโƒฃ CSV to My ๋ฐ์ดํ„ฐ์…‹ ํƒญ
### ๊ธฐ๋Šฅ
- CSV ํŒŒ์ผ์„ Parquet ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜
- ๋ฐ์ดํ„ฐ ์ตœ์ ํ™” ๋ฐ ์ •์ œ
### ์‚ฌ์šฉ ๋ฐฉ๋ฒ•
1. CSV ํŒŒ์ผ ์ค€๋น„ (ํ•„์ˆ˜ ์ปฌ๋Ÿผ: id, text, label, metadata)
2. ํŒŒ์ผ ์—…๋กœ๋“œ ํ›„ '์—…๋กœ๋“œ ๋ฐ ๋ณ€ํ™˜' ๋ฒ„ํŠผ ํด๋ฆญ
3. ๋ณ€ํ™˜๋œ Parquet ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ
### ์ฃผ์˜์‚ฌํ•ญ
- CSV ํŒŒ์ผ์€ ๋ฐ˜๋“œ์‹œ ํ•„์ˆ˜ ์ปฌ๋Ÿผ์„ ํฌํ•จํ•ด์•ผ ํ•จ
- ์ธ์ฝ”๋”ฉ์€ UTF-8 ๊ถŒ์žฅ
---
## 3๏ธโƒฃ Text to My ๋ฐ์ดํ„ฐ์…‹ ํƒญ
### ๊ธฐ๋Šฅ
- ํ…์ŠคํŠธ ํ˜•์‹์˜ ๋ฐ์ดํ„ฐ๋ฅผ Parquet์œผ๋กœ ๋ณ€ํ™˜
- ์ˆ˜๋™ ๋ฐ์ดํ„ฐ ์ž…๋ ฅ ์ง€์›
### ์‚ฌ์šฉ ๋ฐฉ๋ฒ•
1. ์ง€์ •๋œ ํ˜•์‹์œผ๋กœ ํ…์ŠคํŠธ ์ž…๋ ฅ
```
1,"์ด์ˆœ์‹ ","์žฅ๊ตฐ","๊ฑฐ๋ถ์„ "
2,"์›๊ท ","์žฅ๊ตฐ","๋ชจํ•จ"
```
2. '๋ณ€ํ™˜ ๋ฐ ๋‹ค์šด๋กœ๋“œ' ๋ฒ„ํŠผ ํด๋ฆญ
3. ๋ณ€ํ™˜๋œ ํŒŒ์ผ ํ™•์ธ ๋ฐ ๋‹ค์šด๋กœ๋“œ
### ์ž…๋ ฅ ํ˜•์‹
- id: ์ˆœ์ฐจ์  ๋ฒˆํ˜ธ
- text: ์‹ค์ œ ํ…์ŠคํŠธ ๋‚ด์šฉ
- label: ๋ถ„๋ฅ˜ ๋ผ๋ฒจ
- metadata: ๋ถ€๊ฐ€ ์ •๋ณด
---
## 4๏ธโƒฃ Text Preprocessing with LLM ํƒญ
### ๊ธฐ๋Šฅ
- LLM์„ ํ™œ์šฉํ•œ ์ž๋™ ํ…์ŠคํŠธ ์ „์ฒ˜๋ฆฌ
- ๊ตฌ์กฐํ™”๋œ ๋ฐ์ดํ„ฐ์…‹ ์ƒ์„ฑ
### ์‚ฌ์šฉ ๋ฐฉ๋ฒ•
1. ์›๋ฌธ ํ…์ŠคํŠธ ์ž…๋ ฅ
2. '์ „์ฒ˜๋ฆฌ ์‹คํ–‰' ๋ฒ„ํŠผ ํด๋ฆญ
3. ๊ฒฐ๊ณผ ํ™•์ธ ํ›„ ํ•„์š”์‹œ Parquet ๋ณ€ํ™˜
### ํŠน์ง•
- ์ž๋™ ๋ ˆ์ด๋ธ”๋ง
- ๋ฌธ์žฅ ๋‹จ์œ„ ๋ถ„๋ฆฌ
- ์ค‘๋ณต ์ œ๊ฑฐ
- ๋ฐ์ดํ„ฐ ์ •๊ทœํ™”
## ๐Ÿ’ก ์ผ๋ฐ˜์ ์ธ ํŒ
- API Key๋Š” ์•ˆ์ „ํ•˜๊ฒŒ ๋ณด๊ด€ํ•˜๊ณ  ์ฃผ๊ธฐ์ ์œผ๋กœ ๊ฐฑ์‹ 
- ๊ฐ ํƒญ์˜ ์˜ˆ์ œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ์‚ฌ์šฉ๋ฒ• ์ตํžˆ๊ธฐ
- ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ์ด ์ข‹์„์ˆ˜๋ก ๋” ๋‚˜์€ ๊ฒฐ๊ณผ ์ œ๊ณต
- ์˜ค๋ฅ˜ ๋ฐœ์ƒ ์‹œ ์ž…๋ ฅ ๋ฐ์ดํ„ฐ ํ˜•์‹ ํ™•์ธ
- ๋Œ€์šฉ๋Ÿ‰ ์ฒ˜๋ฆฌ ์‹œ ์ ์ ˆํ•œ ์ฒญํฌ ํฌ๊ธฐ๋กœ ๋ถ„ํ•  ์ฒ˜๋ฆฌ
## โš ๏ธ ์ฃผ์˜์‚ฌํ•ญ
- API Key๋ฅผ ํƒ€์ธ๊ณผ ๊ณต์œ ํ•˜์ง€ ์•Š๊ธฐ
- ๋ฏผ๊ฐํ•œ ๊ฐœ์ธ์ •๋ณด ํฌํ•จํ•˜์ง€ ์•Š๊ธฐ
- ๋ฐ์ดํ„ฐ ๋ฐฑ์—… ๊ถŒ์žฅ
- ๋„คํŠธ์›Œํฌ ์ƒํƒœ ํ™•์ธ
- ๋ธŒ๋ผ์šฐ์ € ์บ์‹œ ์ฃผ๊ธฐ์  ์ •๋ฆฌ
## ๐Ÿ” ๋ฌธ์ œ ํ•ด๊ฒฐ
- API Key ์˜ค๋ฅ˜: ํ‚ค ํ˜•์‹ ๋ฐ ์œ ํšจ์„ฑ ํ™•์ธ
- ์˜ค๋ฅ˜ ๋ฐœ์ƒ ์‹œ ์ž…๋ ฅ ๋ฐ์ดํ„ฐ ํ˜•์‹ ํ™•์ธ
- ํŒŒ์ผ ์—…๋กœ๋“œ ์‹คํŒจ ์‹œ ํŒŒ์ผ ํฌ๊ธฐ ๋ฐ ํ˜•์‹ ํ™•์ธ
- ๋ณ€ํ™˜ ์‹คํŒจ ์‹œ ๋ฐ์ดํ„ฐ ์ธ์ฝ”๋”ฉ ํ™•์ธ
- ์‘๋‹ต์ด ๋Š๋ฆด ๊ฒฝ์šฐ ๋ฐ์ดํ„ฐ ํฌ๊ธฐ ์กฐ์ •
""")
gr.Markdown("### [email protected]", elem_id="initial-description")
if __name__ == "__main__":
demo.launch(share=True)