ElPlaguister committed on
Commit
aa0f2ec
โ€ข
1 Parent(s): aff19eb

Feat New Design with Dynamic Tabs

Browse files
Files changed (5) hide show
  1. README.txt +19 -0
  2. app.py +15 -46
  3. koalpaca.py +3 -1
  4. model.py +10 -2
  5. t5.py +2 -0
README.txt ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # KOMUCHAT : Korean community-style relationship counseling chatbot
2
+ KOMUChat: ์˜จ๋ผ์ธ ์ปค๋ฎค๋‹ˆํ‹ฐ ์Šคํƒ€์ผ์˜ ์—ฐ์• ์ƒ๋‹ด์ฑ—๋ด‡
3
+ ### ๊ฐœ์š”
4
+ ์‚ฌ๋žŒ๋“ค์˜ ์‹ค์ œ ๋Œ€ํ™”๋ฅผ ๋ฐ˜์˜ํ•˜๊ธฐ ์œ„ํ•ด ํ•œ๊ตญ ์ปค๋ฎค๋‹ˆํ‹ฐ ์—ํŽจ์ฝ”๋ฆฌ์•„ + ์ธ์Šคํ‹ฐ์ฆˆ์—์„œ ์ˆ˜์ง‘ํ•œ ์•ฝ 3๋งŒ๊ฑด์˜ ์งˆ๋ฌธ-๋‹ต๋ณ€ ๋Œ€ํ™” ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํ•™์Šต์‹œํ‚จ ์ฑ—๋ด‡<br>
5
+ ์ด๋ณ„, ๋ฐ์ดํŠธ์ฝ”์Šค, ์„ ๋ฌผ ๋“ฑ ์—ฐ์• ์™€ ๊ด€๋ จ๋œ ์งˆ๋ฌธ์— ๋Œ€ํ•ด ์˜จ๋ผ์ธ ์ปค๋ฎค๋‹ˆํ‹ฐ์—์„œ ์ƒ๋‹ดํ•˜๋Š” ๊ฒƒ ๊ฐ™์ด ์‘๋‹ตํ•ฉ๋‹ˆ๋‹ค.<br>
6
+ <b>paust-t5-large</b>๋ชจ๋ธ(ํ‰๊ท  ์‘๋‹ต์‹œ๊ฐ„ n์ดˆ)๊ณผ <b>polyglot-ko-12.8b-safetensors</b> ๋ชจ๋ธ(ํ‰๊ท  ์‘๋‹ต์‹œ๊ฐ„ n์ดˆ)์„ ํŒŒ์ธํŠœ๋‹ํ•˜์—ฌ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค.<br>
7
+ ๋ฐ๋ชจ์—์„œ ์„ ํƒํ•˜์—ฌ ์ด์šฉํ•ด๋ณด์‹ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
8
+
9
+ ### ์‘๋‹ต์˜ˆ์‹œ
10
+ | Question | Answer |
11
+ | --- | --- |
12
+ | ํ—ค์–ด์กŒ์๋‹ˆ๋‹ค | ํž˜๋‚ด ๋” ์ข‹์€ ์ธ์—ฐ ๋‚˜ํƒ€๋‚ ๊ฑฐ์•ผ |
13
+ | ๋“œ๋ผ๋งˆ๋งŒ ์ฃผ๊ตฌ์žฅ์ฐฝ ๋ณด๋Š”์ค‘ | ๋ญ ๋ณด๊ณ  ์žˆ์œผ์‹ ๊ฐ€์š” |
14
+ | ๊ฒฐํ˜ผ๊นŒ์ง€ ์ƒ๊ฐํ•˜๋Š” ์ƒํƒœ์ผ์ˆ˜๋ก ๋” ๊น๊นํ•˜๊ฒŒ ๋”ฐ์ง€๊ฒŒ ๋ผ? | ์•„๋ฌด๋ž˜๋„ ๊ทธ๋Ÿฐ ๊ฑฐ ๊ฐ™์•„ |
15
+ | ISTJ ์—ฌ์ž๊ฐ€ ๋‚จ์žํ•œํ…Œ ๊ด€์‹ฌ์žˆ์œผ๋ฉด ์–ด๋–ป๊ฒŒ ํ•ด? | ์—ฐ๋ฝ ์ž์ฃผ ํ•˜๊ณ  ์ทจํ–ฅ ๋งŽ์ด ๋ฌผ์–ด๋ณด๊ณ  ์ž์ฃผ๋Š” ์•„๋‹ˆ์–ด๋„ ๊พธ์ค€ํžˆ ๋งŒ๋‚˜๋ ค ํ•จ |
16
+ | #์ฒญ์ž#๋“ค์•„ ์—ฐ์• ๋Š” ๊ฐ‘์ž๊ธฐ ์‹œ์ž‘ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•ด? | ๋‚˜๋Š” ์ง„์ž ์‹ ๊ธฐํ•œ ๊ฒŒ ์—ฐ์• ์— ๋Œ€ํ•œ ๊ธฐ๋Œ€๊ฐ€ ๋–จ์–ด์ง€๋ฉด ํ•˜๊ณ  ์žˆ๋”๋ž”ใ…‹ใ…‹ใ…‹ |
17
+
18
+ ### ์‚ฌ์šฉ๊ธฐ์ˆ 
19
+ <img src="https://img.shields.io/badge/Python-3776AB?style=for-the-badge&logo=Python&logoColor=white"><img src="https://img.shields.io/badge/PyTorch-EE4C2C?style=for-the-badge&logo=PyTorch&logoColor=white">
app.py CHANGED
@@ -6,7 +6,6 @@ from koalpaca import KoAlpaca
6
  LOCAL_TEST = False
7
  MODEL_STRS = ['T5', 'KoAlpaca']
8
  MODELS = []
9
- cur_index = 0
10
 
11
  def prepare_theme():
12
  theme = gr.themes.Default(primary_hue=colors.gray,
@@ -42,56 +41,26 @@ def prepare_theme():
42
  )
43
  return theme
44
 
45
- def chat(message, chat_history):
46
- response = MODELS[cur_index].generate(message)
47
- chat_history.append((message, response))
48
- return "", gr.Chatbot(chat_history, label=MODEL_STRS[cur_index], bubble_full_width=False)
49
-
50
- def change_model_index(idx, chatbot):
51
- global cur_index
52
- cur_index = idx
53
- # print(cur_index)
54
- return gr.Chatbot(chatbot, label=MODEL_STRS[cur_index], bubble_full_width=False)
55
-
56
  if __name__=='__main__':
57
  theme = prepare_theme()
58
-
 
 
59
  MODELS.append(T5())
60
  if not LOCAL_TEST:
61
  MODELS.append(KoAlpaca())
62
 
63
- with gr.Blocks(theme=theme) as demo:
64
- with gr.Tab("์†Œ๊ฐœ"):
65
- gr.Markdown(
66
- """
67
- # KOMUCHAT : Korean community-style relationship counseling chatbot
68
- KOMUChat: ์˜จ๋ผ์ธ ์ปค๋ฎค๋‹ˆํ‹ฐ ์Šคํƒ€์ผ์˜ ์—ฐ์• ์ƒ๋‹ด์ฑ—๋ด‡
69
- ### ๊ฐœ์š”
70
- ์‚ฌ๋žŒ๋“ค์˜ ์‹ค์ œ ๋Œ€ํ™”๋ฅผ ๋ฐ˜์˜ํ•˜๊ธฐ ์œ„ํ•ด ํ•œ๊ตญ ์ปค๋ฎค๋‹ˆํ‹ฐ ์—ํŽจ์ฝ”๋ฆฌ์•„ + ์ธ์Šคํ‹ฐ์ฆˆ์—์„œ ์ˆ˜์ง‘ํ•œ ์•ฝ 3๋งŒ๊ฑด์˜ ์งˆ๋ฌธ-๋‹ต๋ณ€ ๋Œ€ํ™” ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํ•™์Šต์‹œํ‚จ ์ฑ—๋ด‡<br>
71
- ์ด๋ณ„, ๋ฐ์ดํŠธ์ฝ”์Šค, ์„ ๋ฌผ ๋“ฑ ์—ฐ์• ์™€ ๊ด€๋ จ๋œ ์งˆ๋ฌธ์— ๋Œ€ํ•ด ์˜จ๋ผ์ธ ์ปค๋ฎค๋‹ˆํ‹ฐ์—์„œ ์ƒ๋‹ดํ•˜๋Š” ๊ฒƒ ๊ฐ™์ด ์‘๋‹ตํ•ฉ๋‹ˆ๋‹ค.<br>
72
- <b>paust-t5-large</b>๋ชจ๋ธ(ํ‰๊ท  ์‘๋‹ต์‹œ๊ฐ„ n์ดˆ)๊ณผ <b>polyglot-ko-12.8b-safetensors</b> ๋ชจ๋ธ(ํ‰๊ท  ์‘๋‹ต์‹œ๊ฐ„ n์ดˆ)์„ ํŒŒ์ธํŠœ๋‹ํ•˜์—ฌ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค.<br>
73
- ๋ฐ๋ชจ์—์„œ ์„ ํƒํ•˜์—ฌ ์ด์šฉํ•ด๋ณด์‹ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
74
-
75
- ### ์‘๋‹ต์˜ˆ์‹œ
76
- | Question | Answer |
77
- | --- | --- |
78
- | ํ—ค์–ด์กŒ์๋‹ˆ๋‹ค | ํž˜๋‚ด ๋” ์ข‹์€ ์ธ์—ฐ ๋‚˜ํƒ€๋‚ ๊ฑฐ์•ผ |
79
- | ๋“œ๋ผ๋งˆ๋งŒ ์ฃผ๊ตฌ์žฅ์ฐฝ ๋ณด๋Š”์ค‘ | ๋ญ ๋ณด๊ณ  ์žˆ์œผ์‹ ๊ฐ€์š” |
80
- | ๊ฒฐํ˜ผ๊นŒ์ง€ ์ƒ๊ฐํ•˜๋Š” ์ƒํƒœ์ผ์ˆ˜๋ก ๋” ๊น๊นํ•˜๊ฒŒ ๋”ฐ์ง€๊ฒŒ ๋ผ? | ์•„๋ฌด๋ž˜๋„ ๊ทธ๋Ÿฐ ๊ฑฐ ๊ฐ™์•„ |
81
- | ISTJ ์—ฌ์ž๊ฐ€ ๋‚จ์žํ•œํ…Œ ๊ด€์‹ฌ์žˆ์œผ๋ฉด ์–ด๋–ป๊ฒŒ ํ•ด? | ์—ฐ๋ฝ ์ž์ฃผ ํ•˜๊ณ  ์ทจํ–ฅ ๋งŽ์ด ๋ฌผ์–ด๋ณด๊ณ  ์ž์ฃผ๋Š” ์•„๋‹ˆ์–ด๋„ ๊พธ์ค€ํžˆ ๋งŒ๋‚˜๋ ค ํ•จ |
82
- | #์ฒญ์ž#๋“ค์•„ ์—ฐ์• ๋Š” ๊ฐ‘์ž๊ธฐ ์‹œ์ž‘ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•ด? | ๋‚˜๋Š” ์ง„์ž ์‹ ๊ธฐํ•œ ๊ฒŒ ์—ฐ์• ์— ๋Œ€ํ•œ ๊ธฐ๋Œ€๊ฐ€ ๋–จ์–ด์ง€๋ฉด ํ•˜๊ณ  ์žˆ๋”๋ž”ใ…‹ใ…‹ใ…‹ |
83
-
84
- ### ์‚ฌ์šฉ๊ธฐ์ˆ 
85
- <img src="https://img.shields.io/badge/Python-3776AB?style=for-the-badge&logo=Python&logoColor=white"><img src="https://img.shields.io/badge/PyTorch-EE4C2C?style=for-the-badge&logo=PyTorch&logoColor=white">
86
- """)
87
- with gr.Tab("๋ฐ๋ชจ"):
88
- with gr.Row():
89
- rd = gr.Radio(MODEL_STRS, value='T5', type='index', label='Model Selection', show_label=True, interactive=True)
90
- with gr.Column(scale=5): # ์ฑ—๋ด‡ ๋ถ€๋ถ„
91
- chatbot = gr.Chatbot(label="T5", bubble_full_width=False)
92
- with gr.Row():
93
- txt = gr.Textbox(show_label=False, placeholder='์—ฐ์•  ๊ด€๋ จ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜์„ธ์š”!', container=False)
94
 
95
- txt.submit(chat, [txt, chatbot], [txt, chatbot])
96
- rd.select(change_model_index, [rd, chatbot], [chatbot])
97
  demo.launch(debug=True, share=True)
 
6
  LOCAL_TEST = False
7
  MODEL_STRS = ['T5', 'KoAlpaca']
8
  MODELS = []
 
9
 
10
  def prepare_theme():
11
  theme = gr.themes.Default(primary_hue=colors.gray,
 
41
  )
42
  return theme
43
 
 
 
 
 
 
 
 
 
 
 
 
44
  if __name__=='__main__':
45
  theme = prepare_theme()
46
+ with open('README.txt', 'r') as f:
47
+ readme = f.read()
48
+
49
  MODELS.append(T5())
50
  if not LOCAL_TEST:
51
  MODELS.append(KoAlpaca())
52
 
53
+ with gr.Blocks(theme=prepare_theme()) as demo:
54
+ with gr.Column():
55
+ gr.HTML("<h1>KOMUChat</h1>")
56
+ with gr.Tab("์†Œ๊ฐœ"):
57
+ gr.Markdown(readme)
58
+ for i in range(len(MODELS)):
59
+ with gr.Tab(MODEL_STRS[i], id=i):
60
+ with gr.Column():
61
+ chatbot = gr.Chatbot(label=MODEL_STRS[i], bubble_full_width=False)
62
+ txt = gr.Textbox(show_label=False, placeholder='์—ฐ์•  ๊ด€๋ จ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜์„ธ์š”!', container=False, elem_id=i)
63
+
64
+ txt.submit(MODELS[i].chat, [txt, chatbot], [txt, chatbot])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
 
 
 
66
  demo.launch(debug=True, share=True)
koalpaca.py CHANGED
@@ -4,7 +4,9 @@ from peft import PeftModel, PeftConfig
4
  from model import Model
5
 
6
  class KoAlpaca(Model):
7
- def __init__(self):
 
 
8
  peft_model_id = "4n3mone/Komuchat-koalpaca-polyglot-12.8B"
9
  config = PeftConfig.from_pretrained(peft_model_id)
10
  self.bnb_config = BitsAndBytesConfig(
 
4
  from model import Model
5
 
6
  class KoAlpaca(Model):
7
+ def __init__(self,
8
+ name:str='KoAlpaca'):
9
+ self.name = name
10
  peft_model_id = "4n3mone/Komuchat-koalpaca-polyglot-12.8B"
11
  config = PeftConfig.from_pretrained(peft_model_id)
12
  self.bnb_config = BitsAndBytesConfig(
model.py CHANGED
@@ -1,5 +1,8 @@
 
1
  class Model:
2
- def __init__(self):
 
 
3
  self.model = None
4
  self.tokenizer = None
5
  self.gen_config = None
@@ -7,4 +10,9 @@ class Model:
7
 
8
  def generate(self, inputs:str) -> str:
9
  outputs = inputs
10
- return outputs
 
 
 
 
 
 
1
+ import gradio as gr
2
  class Model:
3
+ def __init__(self,
4
+ name:str="Model"):
5
+ self.name = name
6
  self.model = None
7
  self.tokenizer = None
8
  self.gen_config = None
 
10
 
11
  def generate(self, inputs:str) -> str:
12
  outputs = inputs
13
+ return outputs
14
+
15
+ def chat(self, message, chat_history):
16
+ response = self.generate(message)
17
+ chat_history.append((message, response))
18
+ return "", gr.Chatbot(chat_history, label=self.name, bubble_full_width=False)
t5.py CHANGED
@@ -2,10 +2,12 @@ from transformers import T5TokenizerFast, T5ForConditionalGeneration, Generation
2
  from model import Model
3
  class T5(Model):
4
  def __init__(self,
 
5
  model_dir:str='./models/pko_t5_COMU_patience10',
6
  max_input_length:int=64,
7
  max_target_length:int=64
8
  ):
 
9
  self.model = T5ForConditionalGeneration.from_pretrained(model_dir)
10
  self.tokenizer = T5TokenizerFast.from_pretrained(model_dir)
11
  self.gen_config = GenerationConfig.from_pretrained(model_dir, 'gen_config.json')
 
2
  from model import Model
3
  class T5(Model):
4
  def __init__(self,
5
+ name:str='T5',
6
  model_dir:str='./models/pko_t5_COMU_patience10',
7
  max_input_length:int=64,
8
  max_target_length:int=64
9
  ):
10
+ self.name = name
11
  self.model = T5ForConditionalGeneration.from_pretrained(model_dir)
12
  self.tokenizer = T5TokenizerFast.from_pretrained(model_dir)
13
  self.gen_config = GenerationConfig.from_pretrained(model_dir, 'gen_config.json')