Spaces: Runtime error
A-baoYang committed
Commit 4f0769b • 1 Parent(s): b60e5ba
Initial Commit
- api_calls.py +43 -0
- app.py +206 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_3c44de4afb8345a2a56828e3dd166f41~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_4682c45dd23c4c978391d51594997534~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_555ef67bce36440295cca183664d2a8d~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_64d5d192f68942869bac8adba7657fd4~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_73b25f3c2ad8483ab596a5639d1205d5~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_81ee81b55cd24c3c87144d8c34e0933b~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_9d9838561cde41d3b2dc9ef079dc2303~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_c97c7eda58e142689b15abcd5d706dfa~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_d00100e4a94c4e179821fe47cd059198~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_da95c2a1a3294701a007d34ec02f62a5~mv2.jpg +0 -0
- data/cat_pain_detection/fgs_cat_examples/5f2afc_ee164e5ec5174c61907a63da48a149f2~mv2.jpg +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table.png +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table2.png +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table3.png +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table4.png +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table5.png +0 -0
- data/image_for_test/screenshot_for_test-esg_report_table6.png +0 -0
- data/image_for_test/screenshot_for_test-financial_report_table.png +0 -0
- data/image_for_test/screenshot_for_test-medical_thesis_table.png +0 -0
- data/image_for_test/screenshot_for_test-medical_thesis_table2.jpg +0 -0
api_calls.py
ADDED
@@ -0,0 +1,43 @@
import requests

API_ENDPOINT = "http://35.229.175.237:8889/"


# Generic helper: POST api_params as a JSON body to the given API path.
def call_api(api_path, api_params):
    # API_ENDPOINT already ends with "/", so strip it to avoid a double slash.
    url = f"{API_ENDPOINT.rstrip('/')}/{api_path}"
    # The original passed `json=**api_params`, which is a SyntaxError; the
    # params dict itself is the JSON body.
    response = requests.post(
        url, json=api_params, headers={"Content-Type": "application/json"})
    return response.json()


# filtered_data / prompt_template default to None because bot() in app.py
# omits them when no file is uploaded or the prompt is not replaced.
def api_qa_normal(query, filtered_data=None, prompt_template=None):
    api_path = "qa/normal"
    api_params = {
        "query": query,
        "filtered_data": filtered_data,
        "prompt_template": prompt_template
    }
    return call_api(api_path, api_params)


def api_qa_waterfee(query, filtered_data=None, prompt_template=None):
    api_path = "qa/waterfee"
    api_params = {
        "query": query,
        "filtered_data": filtered_data,
        "prompt_template": prompt_template
    }
    return call_api(api_path, api_params)


def api_ocr(image_filepath, model_provider):
    api_path = "ocr"
    api_params = {
        "image_filepath": image_filepath,
        "model_provider": model_provider
    }
    return call_api(api_path, api_params)


def api_model_cat_pain_assessment(user_input_image):
    api_path = "model/cat_pain_assessment"
    api_params = {
        "user_input_image": user_input_image
    }
    return call_api(api_path, api_params)
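Taken together, api_calls.py is a thin JSON-over-HTTP client for the backend behind API_ENDPOINT. A minimal smoke test might look like the sketch below; it assumes that backend is reachable and the routes above are live. The query and prompt strings are placeholders, and the image path is one of the test screenshots committed under data/image_for_test:

# Hypothetical smoke test for the client above; assumes the service at
# API_ENDPOINT is up. The query and prompt strings are placeholders.
from api_calls import api_ocr, api_qa_normal

extracted = api_ocr(
    image_filepath="data/image_for_test/screenshot_for_test-esg_report_table.png",
    model_provider="Gemini Pro Vision",
)
answer = api_qa_normal(
    query="What is the total reported in this table?",
    filtered_data=[extracted],
    prompt_template="Answer using only this data: {filtered_data}\nQuestion: {query}",
)
print(answer)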
app.py
ADDED
@@ -0,0 +1,206 @@
import arrow
import gradio as gr
import os
import re
import pandas as pd
from pathlib import Path
from time import sleep
from tqdm import tqdm
from api_calls import *

ROOT_DIR = Path(__file__).resolve().parents[0]

# NOTE: `prompt_dict` is used below (value=prompt_dict["qa"]) but was never
# defined in this commit -- a likely cause of the Space's runtime error. A
# minimal default using the two required template variables is assumed here.
# ("Answer the question based on the data below. Question: ...")
prompt_dict = {"qa": "請根據以下資料回答問題:{filtered_data}\n\n問題:{query}"}


def preview_uploaded_file(file_paths):
    # Show the first uploaded file in the preview pane, clear it otherwise.
    if file_paths:
        return gr.update(value=file_paths[0])
    else:
        return gr.update(value=None)


def open_data_check(checked):
    # Toggle visibility of the OCR-result textbox.
    if checked:
        return gr.update(visible=True)
    else:
        return gr.update(visible=False)


def uploaded_file_process(file_path, ocr_model_choice):
    name = Path(file_path).stem
    print(name)
    ocr_extracted_data = api_ocr(
        image_filepath=file_path, model_provider=ocr_model_choice)
    return ocr_extracted_data


def reference_from_file(file_paths, ocr_model_choice="Gemini Pro Vision"):
    data_array = []
    for file_path in tqdm(file_paths):
        data = uploaded_file_process(file_path, ocr_model_choice=ocr_model_choice)
        data_array.append(data)
        sleep(1)
    return data_array


def print_like_dislike(x: gr.LikeData):
    print(x.index, x.value, x.liked)


def bot(query, history, data_array, file_paths, qa_prompt_tmpl, checkbox_replace):
    if data_array:
        params = {"query": query, "filtered_data": data_array}
    else:
        params = {"query": query}
    if checkbox_replace:
        params.update({"prompt_template": qa_prompt_tmpl})

    # Route to the water-fee endpoint when no file is uploaded or an uploaded
    # path mentions "大台北" (Greater Taipei). The original tested
    # `"大台北" in file_paths`, i.e. list membership against whole paths, which
    # can never match; a per-path substring check is assumed to be the intent.
    if not file_paths or any("大台北" in str(p) for p in file_paths):
        func = api_qa_waterfee
    else:
        func = api_qa_normal

    response = func(**params)

    full_answer = ""  # original: "full_anwser"
    for character in response:
        full_answer += character
        yield full_answer


def draw_cat_pain_assessment_result(user_input_image):
    if user_input_image:
        json_result = api_model_cat_pain_assessment(user_input_image)
        total_score = sum(json_result.values())
        df_result = pd.DataFrame(json_result, index=[0]).T.reset_index()
        df_result.columns = ["aspect", "score"]  # original: ["a", "b"]
        return gr.BarPlot(
            df_result,
            x="aspect",
            y="score",
            x_title="Aspects",
            y_title="Score",
            title="Cat Pain Assessment",
            vertical=False,
            height=400,
            width=800,
            tooltip=["aspect", "score"],
            y_lim=[0, 2],
            scale=1,
        ), gr.HTML(
            '<h3>Total Score</h3>'
            f'<span style="font-size: 50px;">{total_score}</span>'
            '<span style="font-size: 40px;">/10</span>'
        ), gr.HTML(
            '<h3>Explanation</h3>'
            '<p>Ear position: 0-2</p>'
            '<p>Orbital tightening: 0-2</p>'
            '<p>Muzzle tension: 0-2</p>'
            '<p>Whiskers change: 0-2</p>'
            '<p>Head position: 0-2</p>'
        )
    else:
        # The callback drives three outputs, so clear all three; the original
        # returned a single gr.update here.
        return gr.update(value=None), gr.update(value=None), gr.update(value=None)


chatbot = gr.Chatbot(
    # Greeting: "I'm ESG AI Chat. How can I help you?"
    [(None, "我是 ESG AI Chat\n有什麼能為您服務的嗎?")],
    elem_id="chatbot",
    scale=1,
    height=700,
    bubble_full_width=False
)
css = """
#center {text-align: center}
footer {visibility: hidden}
a {color: rgb(255, 206, 10) !important}
"""

with gr.Blocks(css=css, theme=gr.themes.Monochrome(neutral_hue="lime")) as demo:

    gr.HTML("<h1>GlobalModelAI AI Product Test</h1><p>Made by `GlobalModelAI Abao`</p>", elem_id="center")
    gr.Markdown("---")

    with gr.Tab("OCR + Text2SQL"):
        with gr.Row():
            with gr.Column():
                gr.Markdown("## OCR Processing", elem_id="center")
                ocr_model_choice = gr.Dropdown(label="Model", value="Gemini Pro Vision", choices=["GPT-4", "Gemini Pro Vision"])
                file_preview = gr.Image(type="filepath", image_mode="RGB", sources=None, label="File Preview")
                # "helc" in the original file_types is assumed to be a typo for "heic".
                file_upload = gr.File(label="Upload File", file_types=["png", "jpg", "jpeg", "heic"], file_count='multiple')
                checkbox_open_data_check = gr.Checkbox(label="Open Data Check")
                text_data_from_file_check = gr.Textbox(label="File Upload Status", interactive=False, visible=False)
                gr.Examples(
                    # One cell per example row: a list of paths for the
                    # multi-file input. The original split each two-file
                    # example into two cells per row.
                    examples=[
                        [[f"{ROOT_DIR}/data/image_for_test/screenshot_for_test-esg_report_table.png"]],
                        [[f"{ROOT_DIR}/data/image_for_test/screenshot_for_test-esg_report_table2.png",
                          f"{ROOT_DIR}/data/image_for_test/screenshot_for_test-esg_report_table3.png"]],
                        [[f"{ROOT_DIR}/data/image_for_test/screenshot_for_test-medical_thesis_table.png",
                          f"{ROOT_DIR}/data/image_for_test/screenshot_for_test-medical_thesis_table2.jpg"]]
                    ],
                    inputs=file_upload,
                    outputs=text_data_from_file_check,
                    fn=reference_from_file,
                    cache_examples=True,
                )
            with gr.Column():
                gr.Markdown("## Chat with your data", elem_id="center")
                with gr.Accordion("Revise Your Prompt", open=False):
                    checkbox_replace = gr.Checkbox(label="Replace with new prompt")
                    qa_prompt_tmpl = gr.Textbox(
                        label="希望用於本次問答的prompt",  # "Prompt to use for this QA session"
                        info="必須使用到的變數:{filtered_data}、{query}",  # "Required variables: {filtered_data}, {query}"
                        value=prompt_dict["qa"],
                        interactive=True,
                    )

                chat_interface = gr.ChatInterface(
                    fn=bot,
                    additional_inputs=[text_data_from_file_check, file_upload, qa_prompt_tmpl, checkbox_replace],
                    chatbot=chatbot,
                )
        chatbot.like(print_like_dislike, None, None)

    with gr.Tab("Cat Pain Assessment Model"):
        gr.Markdown("## Cat Pain Assessment Model", elem_id="center")
        with gr.Row():
            user_input_image = gr.Image(
                type="filepath", image_mode="RGB",
                sources=["upload", "webcam", "clipboard"],
                label="Upload a cat image")
            with gr.Column():
                cat_pain_assessment_barplot = gr.BarPlot(label="Cat Pain Assessment")
                cat_pain_assessment_score = gr.HTML(elem_id="center")
                cat_pain_assessment_explanation = gr.HTML()
        gr.Examples(
            examples=[
                [f"{ROOT_DIR}/data/cat_pain_detection/fgs_cat_examples/5f2afc_3c44de4afb8345a2a56828e3dd166f41~mv2.jpg"],
                [f"{ROOT_DIR}/data/cat_pain_detection/fgs_cat_examples/5f2afc_9d9838561cde41d3b2dc9ef079dc2303~mv2.jpg"],
                [f"{ROOT_DIR}/data/cat_pain_detection/fgs_cat_examples/5f2afc_da95c2a1a3294701a007d34ec02f62a5~mv2.jpg"],
            ],
            inputs=user_input_image,
            outputs=[cat_pain_assessment_barplot, cat_pain_assessment_score, cat_pain_assessment_explanation],
            fn=draw_cat_pain_assessment_result,
            cache_examples=True,
        )

    # Callbacks
    ## OCR + Text2SQL
    file_upload.upload(
        reference_from_file, [file_upload, ocr_model_choice], [text_data_from_file_check]
    )
    file_upload.change(
        preview_uploaded_file, [file_upload], [file_preview]
    )
    ocr_model_choice.change(
        reference_from_file, [file_upload, ocr_model_choice], [text_data_from_file_check]
    )
    checkbox_open_data_check.select(
        open_data_check, [checkbox_open_data_check], [text_data_from_file_check]
    )

    ## Cat Pain Assessment Model
    user_input_image.change(
        draw_cat_pain_assessment_result, [user_input_image],
        [cat_pain_assessment_barplot, cat_pain_assessment_score, cat_pain_assessment_explanation]
    )


if __name__ == "__main__":
    demo.queue().launch(
        max_threads=10,
        server_name=os.environ.get("FRONTEND_HOST", "127.0.0.1"),
        server_port=int(os.environ.get("FRONTEND_PORT", 7862)),
    )
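Read together with its Explanation HTML, draw_cat_pain_assessment_result implies a response contract for the model/cat_pain_assessment endpoint: a flat JSON object with one 0-2 score per feline grimace scale aspect, summed into a total out of 10. A sketch of that assumed shape follows; the keys are taken from the explanation text and the values are made up:

# Assumed response shape for api_model_cat_pain_assessment, inferred from how
# draw_cat_pain_assessment_result consumes it; the values are illustrative.
example_result = {
    "Ear position": 1,
    "Orbital tightening": 0,
    "Muzzle tension": 2,
    "Whiskers change": 1,
    "Head position": 0,
}
assert all(0 <= score <= 2 for score in example_result.values())
total_score = sum(example_result.values())  # rendered as "<total>/10" in the UI
print(f"{total_score}/10")  # -> 4/10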
data/cat_pain_detection/fgs_cat_examples/5f2afc_3c44de4afb8345a2a56828e3dd166f41~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_4682c45dd23c4c978391d51594997534~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_555ef67bce36440295cca183664d2a8d~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_64d5d192f68942869bac8adba7657fd4~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_73b25f3c2ad8483ab596a5639d1205d5~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_81ee81b55cd24c3c87144d8c34e0933b~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_9d9838561cde41d3b2dc9ef079dc2303~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_c97c7eda58e142689b15abcd5d706dfa~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_d00100e4a94c4e179821fe47cd059198~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_da95c2a1a3294701a007d34ec02f62a5~mv2.jpg
ADDED
data/cat_pain_detection/fgs_cat_examples/5f2afc_ee164e5ec5174c61907a63da48a149f2~mv2.jpg
ADDED
data/image_for_test/screenshot_for_test-esg_report_table.png
ADDED
data/image_for_test/screenshot_for_test-esg_report_table2.png
ADDED
data/image_for_test/screenshot_for_test-esg_report_table3.png
ADDED
data/image_for_test/screenshot_for_test-esg_report_table4.png
ADDED
data/image_for_test/screenshot_for_test-esg_report_table5.png
ADDED
data/image_for_test/screenshot_for_test-esg_report_table6.png
ADDED
data/image_for_test/screenshot_for_test-financial_report_table.png
ADDED
data/image_for_test/screenshot_for_test-medical_thesis_table.png
ADDED
data/image_for_test/screenshot_for_test-medical_thesis_table2.jpg
ADDED