ginipick commited on
Commit
c920630
·
verified ·
1 Parent(s): bb7dbbb

Update app-backup.py

Browse files
Files changed (1) hide show
  1. app-backup.py +64 -33
app-backup.py CHANGED
@@ -1,62 +1,88 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
- import requests
 
5
 
6
  # ์ถ”๋ก  API ํด๋ผ์ด์–ธํŠธ ์„ค์ •
7
  hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
8
- #hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
9
 
10
- def load_fashion_code():
11
  try:
12
- with open('fashion.cod', 'r', encoding='utf-8') as file:
13
  return file.read()
14
  except FileNotFoundError:
15
- return "fashion.cod ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
16
  except Exception as e:
17
  return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
18
 
19
- fashion_code = load_fashion_code()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
  def respond(
22
  message,
23
- history: list[tuple[str, str]],
24
  system_message="", # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
25
  max_tokens=1024, # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
26
  temperature=0.7, # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
27
- top_p=0.9, # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
28
  ):
29
- global fashion_code
30
- system_prefix = """๋ฐ˜๋“œ์‹œ ํ•œ๊ธ€๋กœ ๋‹ต๋ณ€ํ• ๊ฒƒ. ๋„ˆ๋Š” ์ฃผ์–ด์ง„ ์†Œ์Šค์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ "์„œ๋น„์Šค ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์•ˆ๋‚ด, qna๋ฅผ ํ•˜๋Š” ์—ญํ• ์ด๋‹ค". ์•„์ฃผ ์นœ์ ˆํ•˜๊ณ  ์ž์„ธํ•˜๊ฒŒ 4000ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ. ๋„ˆ๋Š” ์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์งˆ์˜ ์‘๋‹ต์„ ์ง„ํ–‰ํ•˜๋ฉฐ, ์ด์šฉ์ž์—๊ฒŒ ๋„์›€์„ ์ฃผ์–ด์•ผ ํ•œ๋‹ค. ์ด์šฉ์ž๊ฐ€ ๊ถ๊ธˆํ•ด ํ•  ๋งŒ ํ•œ ๋‚ด์šฉ์— ์นœ์ ˆํ•˜๊ฒŒ ์•Œ๋ ค์ฃผ๋„๋ก ํ•˜๋ผ. ์ฝ”๋“œ ์ „์ฒด ๋‚ด์šฉ์— ๋Œ€ํ•ด์„œ๋Š” ๋ณด์•ˆ์„ ์œ ์ง€ํ•˜๊ณ , ํ‚ค ๊ฐ’ ๋ฐ ์—”๋“œํฌ์ธํŠธ์™€ ๊ตฌ์ฒด์ ์ธ ๋ชจ๋ธ์€ ๊ณต๊ฐœํ•˜์ง€ ๋งˆ๋ผ. """
 
31
 
32
  if message.lower() == "ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰":
33
- system_message = system_message or "" # None์ธ ๊ฒฝ์šฐ ๋นˆ ๋ฌธ์ž์—ด๋กœ ์ดˆ๊ธฐํ™”
34
- system_message += f"\n\nํŒจ์…˜ ์ฝ”๋“œ ๋‚ด์šฉ:\n{fashion_code}"
35
- message = "ํŒจ์…˜ ์ฝ”๋“œ์— ๋Œ€ํ•ด ์„ค๋ช…ํ•ด์ฃผ์„ธ์š”."
 
 
 
 
 
 
 
 
36
 
37
  messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
38
-
39
- # prefix ์ถ”๊ฐ€
40
  for val in history:
41
  if val[0]:
42
  messages.append({"role": "user", "content": val[0]})
43
  if val[1]:
44
  messages.append({"role": "assistant", "content": val[1]})
45
-
46
  messages.append({"role": "user", "content": message})
47
 
48
  response = ""
49
- for message in hf_client.chat_completion(
50
- messages,
51
- max_tokens=max_tokens,
52
- stream=True,
53
- temperature=temperature,
54
- top_p=top_p,
55
- ):
56
- token = message.choices[0].delta.content
57
- if token is not None:
58
- response += token.strip("") # ํ† ํฐ ์ œ๊ฑฐ
59
- yield response
 
 
 
60
 
61
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค ์„ค์ • ๋ถ€๋ถ„๋„ ์ˆ˜์ •
62
  demo = gr.ChatInterface(
@@ -68,15 +94,20 @@ demo = gr.ChatInterface(
68
  gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
69
  ],
70
  examples=[
71
- ["ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰"],
72
- ["์‚ฌ์šฉ ๋ฐฉ๋ฒ•์„ ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
73
- ["์‚ฌ์šฉ ๋ฐฉ๋ฒ•์„ ์œ ํŠœ๋ธŒ ์˜์ƒ ์Šคํฌ๋ฆฝํŠธ ํ˜•ํƒœ๋กœ ์ž‘์„ฑํ•˜๋ผ"],
74
- ["์‚ฌ์šฉ ๋ฐฉ๋ฒ•์„ SEO ์ตœ์ ํ™”ํ•˜์—ฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ"],
 
 
 
 
 
75
  ["๊ณ„์† ์ด์–ด์„œ ๋‹ต๋ณ€ํ•˜๋ผ"],
76
  ],
 
77
  cache_examples=False, # ์บ์‹ฑ ๋น„ํ™œ์„ฑํ™” ์„ค์ •
78
- # css="""footer {visibility: hidden}""", # ์ด๊ณณ์— CSS๋ฅผ ์ถ”๊ฐ€
79
  )
80
 
81
  if __name__ == "__main__":
82
- demo.launch(auth=("gini","pick"))
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
+ import pandas as pd
5
+ from typing import List, Tuple
6
 
7
  # ์ถ”๋ก  API ํด๋ผ์ด์–ธํŠธ ์„ค์ •
8
  hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
9
+ # hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
10
 
11
def load_code(filename):
    """Read a UTF-8 text file and return its contents.

    Instead of raising, failures are reported as a human-readable (Korean)
    message string, which downstream code injects straight into the prompt.

    Args:
        filename: Path of the file to read.

    Returns:
        The file contents, or an error-message string on failure.
    """
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            return file.read()
    except FileNotFoundError:
        # Bug fix: the original f-string had no placeholder, so every missing
        # file produced the same anonymous message. Include the filename.
        return f"{filename} ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
    except Exception as e:
        return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
19
 
20
def load_parquet(filename):
    """Load a Parquet file and return a preview as a Markdown table.

    Only the first 10 rows are rendered — the result is injected into the
    LLM prompt, so a small preview keeps the token cost bounded.

    Args:
        filename: Path of the Parquet file to read.

    Returns:
        A Markdown table of the first 10 rows, or a human-readable (Korean)
        error-message string on failure.
    """
    try:
        df = pd.read_parquet(filename, engine='pyarrow')
        return df.head(10).to_markdown(index=False)
    except FileNotFoundError:
        # Bug fix: the original f-string had no placeholder, so every missing
        # file produced the same anonymous message. Include the filename.
        return f"{filename} ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
    except Exception as e:
        return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
29
+
30
# Load the helper source files once at import time; respond() injects these
# into the system prompt when the matching "... 실행" command is received.
fashion_code = load_code('fashion.cod')
uhdimage_code = load_code('uhdimage.cod')
MixGEN_code = load_code('mgen.cod')

# Preview of the sample Parquet dataset, pre-rendered as a Markdown table.
# NOTE(review): on failure these hold error-message strings, not content —
# the prompt will then contain the error text.
test_parquet_content = load_parquet('test.parquet')
37
 
38
def respond(
    message,
    history: List[Tuple[str, str]],
    system_message="",  # defaults so the Gradio extra inputs are optional
    max_tokens=1024,
    temperature=0.7,
    top_p=0.9,
):
    """Stream a chat completion for the Gradio ChatInterface.

    Recognizes a few fixed commands ("... 실행") that inject pre-loaded file
    contents into the system prompt and swap the user message for a canned
    acknowledgement request; otherwise forwards the conversation as-is.

    Yields:
        The accumulated response text after each streamed token, or a single
        (Korean) error message if inference fails.
    """
    global fashion_code, uhdimage_code, MixGEN_code, test_parquet_content
    system_message = system_message or ""
    system_prefix = """๋ฐ˜๋“œ์‹œ ํ•œ๊ธ€๋กœ ๋‹ต๋ณ€ํ• ๊ฒƒ. ๋„ˆ๋Š” ์ฃผ์–ด์ง„ ์†Œ์Šค์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ "์„œ๋น„์Šค ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์•ˆ๋‚ด, Q&A๋ฅผ ํ•˜๋Š” ์—ญํ• ์ด๋‹ค". ์•„์ฃผ ์นœ์ ˆํ•˜๊ณ  ์ž์„ธํ•˜๊ฒŒ 4000ํ† ํฐ ์ด์ƒ Markdown ํ˜•์‹์œผ๋กœ ์ž‘์„ฑํ•˜๋ผ. ๋„ˆ๋Š” ์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์งˆ์˜ ์‘๋‹ต์„ ์ง„ํ–‰ํ•˜๋ฉฐ, ์ด์šฉ์ž์—๊ฒŒ ๋„์›€์„ ์ฃผ์–ด์•ผ ํ•œ๋‹ค. ์ด์šฉ์ž๊ฐ€ ๊ถ๊ธˆํ•ด ํ•  ๋งŒํ•œ ๋‚ด์šฉ์— ์นœ์ ˆํ•˜๊ฒŒ ์•Œ๋ ค์ฃผ๋„๋ก ํ•˜๋ผ. ์ฝ”๋“œ ์ „์ฒด ๋‚ด์šฉ์— ๋Œ€ํ•ด์„œ๋Š” ๋ณด์•ˆ์„ ์œ ์ง€ํ•˜๊ณ , ํ‚ค ๊ฐ’ ๋ฐ ์—”๋“œํฌ์ธํŠธ์™€ ๊ตฌ์ฒด์ ์ธ ๋ชจ๋ธ์€ ๊ณต๊ฐœํ•˜์ง€ ๋งˆ๋ผ."""

    # Fixed commands: attach the matching pre-loaded content to the system
    # prompt and replace the user message with a canned follow-up request.
    if message.lower() == "ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nํŒจ์…˜ ์ฝ”๋“œ ๋‚ด์šฉ:\n```python\n{fashion_code}\n```"
        message = "ํŒจ์…˜ ๊ฐ€์ƒํ”ผํŒ…์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://aiqcamp-fash.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."
    elif message.lower() == "uhd ์ด๋ฏธ์ง€ ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nUHD ์ด๋ฏธ์ง€ ์ฝ”๋“œ ๋‚ด์šฉ:\n```python\n{uhdimage_code}\n```"
        message = "UHD ์ด๋ฏธ์ง€ ์ƒ์„ฑ์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://openfree-ultpixgen.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."
    elif message.lower() == "mixgen ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nMixGEN ์ฝ”๋“œ ๋‚ด์šฉ:\n```python\n{MixGEN_code}\n```"
        message = "MixGEN3 ์ด๋ฏธ์ง€ ์ƒ์„ฑ์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://openfree-mixgen3.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."
    elif message.lower() == "test.parquet ์‹คํ–‰":
        system_message += f"\n\ntest.parquet ํŒŒ์ผ ๋‚ด์šฉ:\n```markdown\n{test_parquet_content}\n```"
        message = "test.parquet ํŒŒ์ผ์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ๊ด€๋ จ ์„ค๋ช… ๋ฐ Q&A๋ฅผ ์ง„ํ–‰ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค. ๊ถ๊ธˆํ•œ ์ ์ด ์žˆ์œผ๋ฉด ๋ฌผ์–ด๋ณด๋ผ."

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Bug fix: the loop variable was named `message`, shadowing the
        # parameter mid-function; renamed to `chunk`.
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            delta = chunk.choices[0].delta
            # Bug fix: `delta.get(...)` assumed a dict, but some
            # huggingface_hub versions return an object here (older code used
            # `.delta.content`); handle both shapes.
            if isinstance(delta, dict):
                token = delta.get("content")
            else:
                token = getattr(delta, "content", None)
            if token:
                response += token
                yield response
    except Exception as e:
        yield f"์ถ”๋ก  ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"
86
 
87
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค ์„ค์ • ๋ถ€๋ถ„๋„ ์ˆ˜์ •
88
  demo = gr.ChatInterface(
 
94
  gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
95
  ],
96
  examples=[
97
+ ["ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰"],
98
+ ["UHD ์ด๋ฏธ์ง€ ์ฝ”๋“œ ์‹คํ–‰"],
99
+ ["MixGEN ์ฝ”๋“œ ์‹คํ–‰"],
100
+ ["test.parquet ์‹คํ–‰"], # ์ƒˆ๋กœ์šด ์˜ˆ์ œ ์ถ”๊ฐ€
101
+ ["์ƒ์„ธํ•œ ์‚ฌ์šฉ ๋ฐฉ๋ฒ•์„ ๋งˆ์น˜ ํ™”๋ฉด์„ ๋ณด๋ฉด์„œ ์„ค๋ช…ํ•˜๋“ฏ์ด 4000 ํ† ํฐ ์ด์ƒ ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
102
+ ["FAQ 20๊ฑด์„ ์ƒ์„ธํ•˜๊ฒŒ ์ž‘์„ฑํ•˜๋ผ. 4000ํ† ํฐ ์ด์ƒ ์‚ฌ์šฉํ•˜๋ผ."],
103
+ ["์‚ฌ์šฉ ๋ฐฉ๋ฒ•๊ณผ ์ฐจ๋ณ„์ , ํŠน์ง•, ๊ฐ•์ ์„ ์ค‘์‹ฌ์œผ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์œ ํŠœ๋ธŒ ์˜์ƒ ์Šคํฌ๋ฆฝํŠธ ํ˜•ํƒœ๋กœ ์ž‘์„ฑํ•˜๋ผ"],
104
+ ["๋ณธ ์„œ๋น„์Šค๋ฅผ SEO ์ตœ์ ํ™”ํ•˜์—ฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ(๋ฐฐ๊ฒฝ ๋ฐ ํ•„์š”์„ฑ, ๊ธฐ์กด ์œ ์‚ฌ ์„œ๋น„์Šค์™€ ๋น„๊ตํ•˜์—ฌ ํŠน์žฅ์ , ํ™œ์šฉ์ฒ˜, ๊ฐ€์น˜, ๊ธฐ๋Œ€ํšจ๊ณผ, ๊ฒฐ๋ก ์„ ํฌํ•จ)๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ"],
105
+ ["ํŠนํ—ˆ ์ถœ์›์— ํ™œ์šฉํ•  ๊ธฐ์ˆ  ๋ฐ ๋น„์ฆˆ๋‹ˆ์Šค๋ชจ๋ธ ์ธก๋ฉด์„ ํฌํ•จํ•˜์—ฌ ํŠนํ—ˆ ์ถœ์›์„œ ๊ตฌ์„ฑ์— ๋งž๊ฒŒ ํ˜์‹ ์ ์ธ ์ฐฝ์˜ ๋ฐœ๋ช… ๋‚ด์šฉ์„ ์ค‘์‹ฌ์œผ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ."],
106
  ["๊ณ„์† ์ด์–ด์„œ ๋‹ต๋ณ€ํ•˜๋ผ"],
107
  ],
108
+ theme="Nymbo/Nymbo_Theme",
109
  cache_examples=False, # ์บ์‹ฑ ๋น„ํ™œ์„ฑํ™” ์„ค์ •
 
110
  )
111
 
112
if __name__ == "__main__":
    # NOTE(review): this revision launches without the basic-auth credentials
    # the previous version passed to launch() — confirm public access is
    # intended.
    demo.launch()