JasiLiang commited on
Commit
62d106f
·
verified ·
1 Parent(s): fca928b

initial commit

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. README.md +35 -6
  2. app.py +65 -0
  3. arena_elo/LICENSE +21 -0
  4. arena_elo/README.md +46 -0
  5. arena_elo/battle_count_heatmap.png +0 -0
  6. arena_elo/cut_off_date.txt +1 -0
  7. arena_elo/elo_rating/__init__.py +0 -0
  8. arena_elo/elo_rating/__pycache__/__init__.cpython-310.pyc +0 -0
  9. arena_elo/elo_rating/__pycache__/basic_stats.cpython-310.pyc +0 -0
  10. arena_elo/elo_rating/__pycache__/clean_battle_data.cpython-310.pyc +0 -0
  11. arena_elo/elo_rating/__pycache__/elo_analysis.cpython-310.pyc +0 -0
  12. arena_elo/elo_rating/__pycache__/generate_leaderboard.cpython-310.pyc +0 -0
  13. arena_elo/elo_rating/__pycache__/inspect_cost.cpython-310.pyc +0 -0
  14. arena_elo/elo_rating/__pycache__/inspect_elo_rating_pkl.cpython-310.pyc +0 -0
  15. arena_elo/elo_rating/__pycache__/model_registry.cpython-310.pyc +0 -0
  16. arena_elo/elo_rating/__pycache__/utils.cpython-310.pyc +0 -0
  17. arena_elo/elo_rating/basic_stats.py +227 -0
  18. arena_elo/elo_rating/clean_battle_data.py +342 -0
  19. arena_elo/elo_rating/elo_analysis.py +395 -0
  20. arena_elo/elo_rating/filter_clean_battle_data.py +34 -0
  21. arena_elo/elo_rating/generate_leaderboard.py +88 -0
  22. arena_elo/elo_rating/inspect_conv_rating.py +234 -0
  23. arena_elo/elo_rating/inspect_cost.py +177 -0
  24. arena_elo/elo_rating/inspect_elo_rating_pkl.py +33 -0
  25. arena_elo/elo_rating/model_registry.py +578 -0
  26. arena_elo/elo_rating/upload_battle_data.py +193 -0
  27. arena_elo/elo_rating/utils.py +83 -0
  28. arena_elo/evaluator/convert_to_evaluator_data.py +134 -0
  29. arena_elo/evaluator/rating_analysis.ipynb +321 -0
  30. arena_elo/get_latest_data.sh +17 -0
  31. arena_elo/pyproject.toml +28 -0
  32. arena_elo/requirements.txt +28 -0
  33. arena_elo/results/20241224/clean_battle.json +210 -0
  34. arena_elo/results/20241224/elo_results.pkl +3 -0
  35. arena_elo/results/20241224/leaderboard.csv +3 -0
  36. arena_elo/results/20241226/clean_battle.json +482 -0
  37. arena_elo/results/20241226/elo_results.pkl +3 -0
  38. arena_elo/results/20241226/leaderboard.csv +9 -0
  39. arena_elo/results/latest/clean_battle.json +482 -0
  40. arena_elo/results/latest/elo_results.pkl +3 -0
  41. arena_elo/results/latest/leaderboard.csv +9 -0
  42. arena_elo/simple_test.py +16 -0
  43. arena_elo/update_elo_rating.sh +30 -0
  44. arena_elo/win_fraction_heatmap.png +0 -0
  45. logs/vote_log/2024-12-24-conv.json +26 -0
  46. logs/vote_log/2024-12-25-conv.json +7 -0
  47. logs/vote_log/2024-12-26-conv.json +27 -0
  48. logs/vote_log/2024-12-27-conv.json +4 -0
  49. logs/vote_log/gr_web_image_generation_multi.log +450 -0
  50. logs/vote_log/gr_web_image_generation_multi.log.2024-12-25 +797 -0
README.md CHANGED
@@ -1,14 +1,43 @@
1
  ---
2
  title: IDEA Bench Arena
3
- emoji: 🌖
4
- colorFrom: gray
5
- colorTo: gray
6
  sdk: gradio
7
  sdk_version: 5.9.1
8
  app_file: app.py
9
  pinned: false
10
- license: cc-by-nc-4.0
11
- short_description: Official ARENA of IDEA-Bench
12
  ---
13
 
14
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
  title: IDEA Bench Arena
3
+ emoji: 📉
4
+ colorFrom: blue
5
+ colorTo: green
6
  sdk: gradio
7
  sdk_version: 5.9.1
8
  app_file: app.py
9
  pinned: false
10
+ license: cc-by-4.0
11
+ short_description: Official arena of IDEA-Bench.
12
  ---
13
 
14
+ ## Installation
15
+
16
+ - for cuda 11.8
17
+ ```bash
18
+ conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia
19
+ pip3 install -U xformers --index-url https://download.pytorch.org/whl/cu118
20
+ pip install -r requirements.txt
21
+ ```
22
+ - for cuda 12.1
23
+ ```bash
24
+ conda install pytorch torchvision torchaudio pytorch-cuda=12.1 -c pytorch -c nvidia
25
+ pip install -r requirements.txt
26
+ ```
27
+
28
+ ## Start Hugging Face UI
29
+ ```bash
30
+ python app.py
31
+ ```
32
+
33
+ ## Start Log server
34
+ ```bash
35
+ uvicorn serve.log_server:app --reload --port 22005 --host 0.0.0.0
36
+ ```
37
+
38
+ ## Update leaderboard
39
+ ```bash
40
+ cd arena_elo && bash update_elo_rating.sh
41
+ ```
42
+
43
+ Paper: [https://arxiv.org/abs/2412.11767](https://arxiv.org/abs/2412.11767)
app.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import os
3
+ # os.system("pip install -r requirements.txt -U")
4
+ # os.system("pip uninstall -y apex")
5
+ # os.system("pip uninstall -y flash-attn")
6
+ # os.system("FLASH_ATTENTION_FORCE_BUILD=TRUE pip install flash-attn")
7
+ from serve.gradio_web import *
8
+ from serve.leaderboard import build_leaderboard_tab
9
+ from model.model_manager import ModelManager
10
+ from pathlib import Path
11
+ from serve.constants import SERVER_PORT, ROOT_PATH, ELO_RESULTS_DIR
12
+
13
def build_combine_demo(models, elo_results_file, leaderboard_table_file):
    """Assemble the top-level Gradio UI.

    Builds nested tabs: anonymous battle, named side-by-side, an optional
    leaderboard (only when an elo results file was found), and an about page.

    Args:
        models: ModelManager instance passed through to the arena tabs.
        elo_results_file: mapping with key 't2i_generation' -> elo pickle path;
            falsy (empty dict) when no results exist, which hides the tab.
        leaderboard_table_file: mapping with key 't2i_generation' -> csv path.

    Returns:
        The constructed gr.Blocks demo (not yet launched).
    """
    with gr.Blocks(
        title="Play with Open Vision Models",
        theme=gr.themes.Default(),
        css=block_css,  # shared stylesheet from serve.gradio_web
    ) as demo:
        with gr.Tabs() as tabs_combine:
            with gr.Tab("Image Generation", id=0):
                with gr.Tabs() as tabs_ig:
                    with gr.Tab("Generation Arena (battle)", id=0):
                        build_side_by_side_ui_anony(models)

                    with gr.Tab("Generation Arena (side-by-side)", id=1):
                        build_side_by_side_ui_named(models)

                    # Leaderboard tab only appears when elo results were loaded.
                    if elo_results_file:
                        with gr.Tab("Generation Leaderboard", id=2):
                            build_leaderboard_tab(elo_results_file['t2i_generation'], leaderboard_table_file['t2i_generation'])

                    with gr.Tab("About Us", id=3):
                        build_about()

    return demo
37
+
38
+
39
def load_elo_results(elo_results_dir):
    """Locate the elo-results pickle and leaderboard csv under a directory.

    Args:
        elo_results_dir: directory to search, or None.

    Returns:
        (elo_results_file, leaderboard_table_file) — each maps
        't2i_generation' to a Path. When elo_results_dir is None the
        returned defaultdicts yield None for any key; otherwise plain
        dicts are returned (empty if no matching files were found).
    """
    from collections import defaultdict

    pkl_by_task = defaultdict(lambda: None)
    csv_by_task = defaultdict(lambda: None)
    if elo_results_dir is not None:
        results_dir = Path(elo_results_dir)
        pkl_by_task, csv_by_task = {}, {}
        # Last glob match wins, mirroring the original overwrite-in-loop.
        for candidate in results_dir.glob('elo_results*.pkl'):
            pkl_by_task['t2i_generation'] = candidate
        for candidate in results_dir.glob('*leaderboard.csv'):
            csv_by_task['t2i_generation'] = candidate

    return pkl_by_task, csv_by_task
53
+
54
if __name__ == "__main__":
    # Runtime configuration comes from serve.constants.
    server_port = int(SERVER_PORT)
    root_path = ROOT_PATH
    elo_results_dir = ELO_RESULTS_DIR

    models = ModelManager()

    elo_results_file, leaderboard_table_file = load_elo_results(elo_results_dir)
    demo = build_combine_demo(models, elo_results_file, leaderboard_table_file)
    # Bug fix: the original called demo.launch(...) a second time after this
    # queued launch. launch() blocks until shutdown, so the second call was
    # either dead code or failed on the already-bound port. Launch once.
    demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
arena_elo/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 WildVision-Bench
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
arena_elo/README.md ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Computing the Elo Ratings
2
+
3
+
4
+ ```bash
5
+ apt-get -y install pkg-config
6
+ pip install -r requirements.txt
7
+ ```
8
+
9
+
10
+ ### to update the leaderboard
11
+
12
+ ```bash
13
+ export LOGDIR="/path/to/your/logdir"
14
+ bash update_elo_rating.sh
15
+ ```
16
+
17
+ ### to inspect the leaderboard status
18
+ ```bash
19
+ python -m elo_rating.inspect_elo_rating_pkl
20
+ ```
21
+
22
+ ### to inspect the collected data status and cost
23
+ ```bash
24
+ export LOGDIR="/path/to/your/logdir"
25
+ python -m elo_rating.inspect_cost
26
+ ```
27
+
28
+ ### to upload the battle data to hugging face🤗
29
+ ```bash
30
+ export HUGGINGFACE_TOKEN="your_huggingface_token"
31
+ bash get_latest_data.sh
32
+ python -m elo_rating.upload_battle_data --repo_id "WildVision/wildvision-bench" --log_dir "./vision-arena-logs/"
33
+ ```
34
+
35
+ ### to upload the chat data to hugging face🤗
36
+ ```bash
37
+ export HUGGINGFACE_TOKEN="your_huggingface_token"
38
+ bash get_latest_data.sh
39
+ python -m elo_rating.upload_chat_data --repo_id "WildVision/wildvision-bench" --log_dir "./vision-arena-logs/"
40
+ ```
41
+
42
+
43
+ ### to get the collected data
44
+ ```bash
45
+ python -m
46
+
arena_elo/battle_count_heatmap.png ADDED
arena_elo/cut_off_date.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ 20241226
arena_elo/elo_rating/__init__.py ADDED
File without changes
arena_elo/elo_rating/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (185 Bytes). View file
 
arena_elo/elo_rating/__pycache__/basic_stats.cpython-310.pyc ADDED
Binary file (6.26 kB). View file
 
arena_elo/elo_rating/__pycache__/clean_battle_data.cpython-310.pyc ADDED
Binary file (8.12 kB). View file
 
arena_elo/elo_rating/__pycache__/elo_analysis.cpython-310.pyc ADDED
Binary file (9.89 kB). View file
 
arena_elo/elo_rating/__pycache__/generate_leaderboard.cpython-310.pyc ADDED
Binary file (2.02 kB). View file
 
arena_elo/elo_rating/__pycache__/inspect_cost.cpython-310.pyc ADDED
Binary file (4.96 kB). View file
 
arena_elo/elo_rating/__pycache__/inspect_elo_rating_pkl.cpython-310.pyc ADDED
Binary file (1.08 kB). View file
 
arena_elo/elo_rating/__pycache__/model_registry.cpython-310.pyc ADDED
Binary file (14.4 kB). View file
 
arena_elo/elo_rating/__pycache__/utils.cpython-310.pyc ADDED
Binary file (2.29 kB). View file
 
arena_elo/elo_rating/basic_stats.py ADDED
@@ -0,0 +1,227 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import code
3
+ import datetime
4
+ import json
5
+ import os
6
+ from pytz import timezone
7
+ import time
8
+
9
+ import pandas as pd # pandas>=2.0.3
10
+ import plotly.express as px
11
+ import plotly.graph_objects as go
12
+ from tqdm import tqdm
13
+
14
+ NUM_SERVERS = 1
15
+ LOG_ROOT_DIR = os.getenv("LOGDIR", None)
16
+ if LOG_ROOT_DIR is None:
17
+ raise ValueError("LOGDIR environment variable not set, please set it by `export LOGDIR=...`")
18
+
19
def get_log_files(max_num_files=None):
    """Collect the vote-log ("*-conv.json") file paths under LOG_ROOT_DIR.

    With NUM_SERVERS == 1 the root directory is scanned directly; otherwise
    per-server subdirectories server0..serverN-1 are scanned.

    Args:
        max_num_files: if given, keep only the most recent that many files.

    Returns:
        List of file paths sorted by modification time, oldest first.
    """
    log_root = os.path.expanduser(LOG_ROOT_DIR)
    filenames = []
    if NUM_SERVERS == 1:
        for filename in os.listdir(log_root):
            if filename.endswith("-conv.json"):
                # Bug fix: the path must include the listed file name; the
                # original dropped it, so every entry pointed at the same
                # non-existent path and getmtime raised.
                filepath = f"{log_root}/{filename}"
                filenames.append((filepath, os.path.getmtime(filepath)))
    else:
        for i in range(NUM_SERVERS):
            for filename in os.listdir(f"{log_root}/server{i}"):
                if filename.endswith("-conv.json"):
                    filepath = f"{log_root}/server{i}/{filename}"
                    filenames.append((filepath, os.path.getmtime(filepath)))
    # Sort by mtime, then strip the timestamps.
    filenames = sorted(filenames, key=lambda x: x[1])
    filenames = [x[0] for x in filenames]

    max_num_files = max_num_files or len(filenames)
    return filenames[-max_num_files:]
42
+
43
+
44
def load_log_files(filename):
    """Parse one vote-log (jsonl) file into light-weight event dicts.

    Retries on FileNotFoundError because log files are written/rotated
    concurrently by the serving process.

    Args:
        filename: path to a "*-conv.json" log file.

    Returns:
        List of dicts with keys type, tstamp, model, models.

    Raises:
        FileNotFoundError: if the file never appears after 5 retries
            (the original left `lines` unbound and crashed with NameError).
    """
    lines = None
    for _retry in range(5):
        try:
            # Close the handle explicitly; the original leaked it.
            with open(filename) as f:
                lines = f.readlines()
            break
        except FileNotFoundError:
            time.sleep(2)
    if lines is None:
        raise FileNotFoundError(filename)

    data = []
    for line in lines:
        row = json.loads(line)
        data.append(
            dict(
                type=row["type"],
                tstamp=row["tstamp"],
                model=row.get("model", ""),
                models=row.get("models", ["", ""]),
            )
        )
    return data
64
+
65
+
66
def load_log_files_parallel(log_files, num_threads=16):
    """Load many vote-log files in a worker pool and concatenate the rows.

    Despite the parameter name, this uses a process Pool (multiprocessing),
    not threads. Row order follows the order of log_files.
    """
    from multiprocessing import Pool

    with Pool(num_threads) as pool:
        per_file = list(tqdm(pool.imap(load_log_files, log_files), total=len(log_files)))
    return [row for rows in per_file for row in rows]
75
+
76
+
77
def get_anony_vote_df(df):
    """Return only the anonymous-battle vote rows of an event DataFrame.

    A row qualifies when its event type is one of the four vote types and
    the public model names are blank (first entry == "" marks an
    anonymous battle).
    """
    vote_types = ["leftvote", "rightvote", "tievote", "bothbad_vote"]
    votes = df[df["type"].isin(vote_types)]
    is_anony = votes["models"].apply(lambda names: names[0] == "")
    return votes[is_anony]
83
+
84
+
85
def merge_counts(series, on, names):
    # Merge a list of value_counts() Series into one wide DataFrame.
    # `on` is the shared key name (e.g. "model" or "type"); `names`
    # relabels the count columns (e.g. ["All", "Last Day", "Last Hour"]).
    # NOTE(review): relies on pandas naming conventions for merged
    # count columns — verify against the installed pandas version.
    ret = pd.merge(series[0], series[1], on=on)
    for i in range(2, len(series)):
        ret = pd.merge(ret, series[i], on=on)
    ret = ret.reset_index()
    # The count columns are the last len(series) columns after the merge;
    # rename them positionally to the requested display names.
    old_names = list(ret.columns)[-len(series) :]
    rename = {old_name: new_name for old_name, new_name in zip(old_names, names)}
    ret = ret.rename(columns=rename)
    return ret
94
+
95
+
96
def report_basic_stats(log_files):
    """Aggregate the vote logs into plots and markdown summary tables.

    Args:
        log_files: paths returned by get_log_files().

    Returns:
        Dict with a stacked per-day bar chart (plotly Figure), markdown
        tables of model/action/anonymous-vote counts, a per-hour table for
        the last 24 hours, and the last-update timestamp (US/Pacific).
    """
    df_all = load_log_files_parallel(log_files)
    df_all = pd.DataFrame(df_all)
    now_t = df_all["tstamp"].max()
    df_1_hour = df_all[df_all["tstamp"] > (now_t - 3600)]
    df_1_day = df_all[df_all["tstamp"] > (now_t - 3600 * 24)]
    anony_vote_df_all = get_anony_vote_df(df_all)

    # Chat trends.
    # Fix: top-level pd.value_counts() is deprecated in pandas 2.x (this
    # file requires pandas>=2.0.3); use Series.value_counts() instead.
    chat_dates = [
        datetime.datetime.fromtimestamp(x, tz=timezone("US/Pacific")).strftime(
            "%Y-%m-%d"
        )
        for x in df_all[df_all["type"] == "chat"]["tstamp"]
    ]
    chat_dates_counts = pd.Series(chat_dates).value_counts()
    vote_dates = [
        datetime.datetime.fromtimestamp(x, tz=timezone("US/Pacific")).strftime(
            "%Y-%m-%d"
        )
        for x in anony_vote_df_all["tstamp"]
    ]
    vote_dates_counts = pd.Series(vote_dates).value_counts()
    chat_dates_bar = go.Figure(
        data=[
            go.Bar(
                name="Anony. Vote",
                x=vote_dates_counts.index,
                y=vote_dates_counts,
                text=[f"{val:.0f}" for val in vote_dates_counts],
                textposition="auto",
            ),
            go.Bar(
                name="Chat",
                x=chat_dates_counts.index,
                y=chat_dates_counts,
                text=[f"{val:.0f}" for val in chat_dates_counts],
                textposition="auto",
            ),
        ]
    )
    chat_dates_bar.update_layout(
        barmode="stack",
        xaxis_title="Dates",
        yaxis_title="Count",
        height=300,
        width=1200,
    )

    # Model call counts over three windows (all time / last day / last hour).
    model_hist_all = df_all[df_all["type"] == "chat"]["model"].value_counts()
    model_hist_1_day = df_1_day[df_1_day["type"] == "chat"]["model"].value_counts()
    model_hist_1_hour = df_1_hour[df_1_hour["type"] == "chat"]["model"].value_counts()
    model_hist = merge_counts(
        [model_hist_all, model_hist_1_day, model_hist_1_hour],
        on="model",
        names=["All", "Last Day", "Last Hour"],
    )
    model_hist_md = model_hist.to_markdown(index=False, tablefmt="github")

    # Action counts (every event type, same three windows).
    action_hist_all = df_all["type"].value_counts()
    action_hist_1_day = df_1_day["type"].value_counts()
    action_hist_1_hour = df_1_hour["type"].value_counts()
    action_hist = merge_counts(
        [action_hist_all, action_hist_1_day, action_hist_1_hour],
        on="type",
        names=["All", "Last Day", "Last Hour"],
    )
    action_hist_md = action_hist.to_markdown(index=False, tablefmt="github")

    # Anony vote counts (last-hour window intentionally disabled).
    anony_vote_hist_all = anony_vote_df_all["type"].value_counts()
    anony_vote_df_1_day = get_anony_vote_df(df_1_day)
    anony_vote_hist_1_day = anony_vote_df_1_day["type"].value_counts()
    # anony_vote_df_1_hour = get_anony_vote_df(df_1_hour)
    # anony_vote_hist_1_hour = anony_vote_df_1_hour["type"].value_counts()
    anony_vote_hist = merge_counts(
        [anony_vote_hist_all, anony_vote_hist_1_day],
        on="type",
        names=["All", "Last Day"],
    )
    anony_vote_hist_md = anony_vote_hist.to_markdown(index=False, tablefmt="github")

    # Chats per hour over the last 24 hours, newest bucket first.
    chat_1_day = df_1_day[df_1_day["type"] == "chat"]
    num_chats_last_24_hours = []
    base = df_1_day["tstamp"].min()
    for i in range(24, 0, -1):
        left = base + (i - 1) * 3600
        right = base + i * 3600
        num = ((chat_1_day["tstamp"] >= left) & (chat_1_day["tstamp"] < right)).sum()
        num_chats_last_24_hours.append(num)
    times = [
        datetime.datetime.fromtimestamp(
            base + i * 3600, tz=timezone("US/Pacific")
        ).strftime("%Y-%m-%d %H:%M:%S %Z")
        for i in range(24, 0, -1)
    ]
    last_24_hours_df = pd.DataFrame({"time": times, "value": num_chats_last_24_hours})
    last_24_hours_md = last_24_hours_df.to_markdown(index=False, tablefmt="github")

    # Last update datetime (the newest event seen in the logs).
    last_updated_tstamp = now_t
    last_updated_datetime = datetime.datetime.fromtimestamp(
        last_updated_tstamp, tz=timezone("US/Pacific")
    ).strftime("%Y-%m-%d %H:%M:%S %Z")

    # code.interact(local=locals())

    return {
        "chat_dates_bar": chat_dates_bar,
        "model_hist_md": model_hist_md,
        "action_hist_md": action_hist_md,
        "anony_vote_hist_md": anony_vote_hist_md,
        "num_chats_last_24_hours": last_24_hours_md,
        "last_updated_datetime": last_updated_datetime,
    }
214
+
215
+
216
if __name__ == "__main__":
    # CLI entry point: summarize the vote logs as markdown tables on stdout.
    parser = argparse.ArgumentParser()
    parser.add_argument("--max-num-files", type=int)
    args = parser.parse_args()

    log_files = get_log_files(args.max_num_files)
    basic_stats = report_basic_stats(log_files)

    for section in (
        "action_hist_md",
        "model_hist_md",
        "anony_vote_hist_md",
        "num_chats_last_24_hours",
    ):
        print(basic_stats[section] + "\n")
arena_elo/elo_rating/clean_battle_data.py ADDED
@@ -0,0 +1,342 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Clean chatbot arena battle log.
3
+
4
+ Usage:
5
+ python3 clean_battle_data.py --mode conv_release
6
+ """
7
+ import argparse
8
+ import datetime
9
+ import json
10
+ import os
11
+ import sys
12
+ from pytz import timezone
13
+ import time
14
+ import PIL
15
+ from PIL import ImageFile
16
+ ImageFile.LOAD_TRUNCATED_IMAGES = True
17
+
18
+ from tqdm import tqdm
19
+
20
+ from .basic_stats import get_log_files, NUM_SERVERS, LOG_ROOT_DIR
21
+ from .utils import detect_language, get_time_stamp_from_date
22
+
23
+ VOTES = ["tievote", "leftvote", "rightvote", "bothbad_vote"]
24
+ IDENTITY_WORDS = [
25
+ "vicuna",
26
+ "lmsys",
27
+ "koala",
28
+ "uc berkeley",
29
+ "open assistant",
30
+ "laion",
31
+ "chatglm",
32
+ "chatgpt",
33
+ "gpt-4",
34
+ "openai",
35
+ "anthropic",
36
+ "claude",
37
+ "bard",
38
+ "palm",
39
+ "lamda",
40
+ "google",
41
+ "llama",
42
+ "qianwan",
43
+ "alibaba",
44
+ "mistral",
45
+ "zhipu",
46
+ "KEG lab",
47
+ "01.AI",
48
+ "AI2",
49
+ "Tülu",
50
+ "Tulu",
51
+ "NETWORK ERROR DUE TO HIGH TRAFFIC. PLEASE REGENERATE OR REFRESH THIS PAGE.",
52
+ "$MODERATION$ YOUR INPUT VIOLATES OUR CONTENT MODERATION GUIDELINES.",
53
+ "API REQUEST ERROR. Please increase the number of max tokens.",
54
+ "**API REQUEST ERROR** Reason: The response was blocked.",
55
+ "**API REQUEST ERROR**",
56
+ ]
57
+
58
+ for i in range(len(IDENTITY_WORDS)):
59
+ IDENTITY_WORDS[i] = IDENTITY_WORDS[i].lower()
60
+
61
+
62
def remove_html(raw):
    """Strip the HTML/markdown wrapper around a displayed model name."""
    if raw.startswith("<h3>"):
        # "<h3>Model X: name</h3>\n" -> text between ": " and the closing tag.
        start = raw.find(": ") + 2
        return raw[start:-len("</h3>\n")]
    if raw.startswith(("### Model A: ", "### Model B: ")):
        # Drop the 13-character markdown prefix.
        return raw[len("### Model A: "):]
    return raw
68
+
69
+
70
def to_openai_format(messages):
    """Convert [(speaker, text), ...] turns into OpenAI chat-message dicts.

    Even positions become "user" turns, odd positions "assistant"; only
    the text (second element) of each turn is kept.
    """
    roles = ("user", "assistant")
    return [
        {"role": roles[idx % 2], "content": turn[1]}
        for idx, turn in enumerate(messages)
    ]
76
+
77
+
78
def replace_model_name(old_name, tstamp):
    """Map legacy/display model names to their canonical leaderboard names.

    gpt-4 / gpt-3.5-turbo get a date suffix chosen by when the battle
    happened (cutoff 1687849200 — presumably the 0613 rollout date in
    late June 2023; confirm against upstream).
    """
    canonical = {
        "bard": "palm-2",
        "claude-v1": "claude-1",
        "claude-instant-v1": "claude-instant-1",
        "oasst-sft-1-pythia-12b": "oasst-pythia-12b",
        "claude-2": "claude-2.0",
        "PlayGroundV2": "PlayGround V2",
        "PlayGroundV2.5": "PlayGround V2.5",
    }
    if old_name in ("gpt-4", "gpt-3.5-turbo"):
        suffix = "-0613" if tstamp > 1687849200 else "-0314"
        return old_name + suffix
    return canonical.get(old_name, old_name)
96
+
97
+
98
def read_file(filename):
    """Read one vote-log (jsonl) file and keep only vote events (VOTES).

    Retries on FileNotFoundError because log files are written
    concurrently. A malformed JSON line aborts the whole run.

    Args:
        filename: path to a "*-conv.json" log file.

    Returns:
        List of raw row dicts whose "type" is in VOTES.
    """
    data = []
    line = None
    for _retry in range(5):
        try:
            for line in open(filename):
                row = json.loads(line)
                if row["type"] in VOTES:
                    data.append(row)
            break
        except FileNotFoundError:
            time.sleep(2)
        except json.JSONDecodeError:
            # Bug fixes: the message used to print a placeholder instead of
            # the file name; it printed `row` (possibly unbound) instead of
            # the offending line; and exit(0) signalled success on failure.
            print(f"Error in reading {filename}")
            print(line)
            exit(1)
    return data
115
+
116
+
117
def read_file_parallel(log_files, num_threads=16):
    """Read every log file in a worker pool and concatenate the vote rows.

    Uses a process Pool (multiprocessing) despite the parameter name;
    result order follows the order of log_files.
    """
    from multiprocessing import Pool

    with Pool(num_threads) as pool:
        per_file = list(tqdm(pool.imap(read_file, log_files), total=len(log_files)))
    return [row for rows in per_file for row in rows]
126
+
127
def load_image(image_path):
    """Open an image from disk; return None when it is missing or unreadable.

    Deliberately best-effort: callers treat None as "skip this battle".
    Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    SystemExit, and genuine programming errors.
    """
    try:
        return PIL.Image.open(image_path)
    except (OSError, ValueError):
        # OSError covers missing files and PIL.UnidentifiedImageError.
        return None
132
+
133
def clean_battle_data(log_files, exclude_model_names, ban_ip_list=None, sanitize_ip=False, mode="simple"):
    """Turn raw vote-log rows into a cleaned, chronologically sorted battle list.

    Filters rows with missing/mismatched model names, optionally verifies
    that both sides saw the same input image (mode="conv_release"), drops
    excluded models and banned IPs, and prints summary statistics.

    Args:
        log_files: vote-log paths (see get_log_files).
        exclude_model_names: models whose battles are discarded.
        ban_ip_list: IPs whose votes are discarded (None disables).
        sanitize_ip: replace raw IPs with stable numeric pseudonyms.
        mode: "simple" or "conv_release" (adds the input-image check).

    Returns:
        List of battle dicts (model_a, model_b, winner, judge, anony, tstamp).
    """
    data = read_file_parallel(log_files, num_threads=16)

    # Map raw vote types to the winner labels used downstream.
    convert_type = {
        "leftvote": "model_a",
        "rightvote": "model_b",
        "tievote": "tie",
        "bothbad_vote": "tie (bothbad)",
    }

    all_models = set()
    all_ips = dict()
    ct_anony = 0
    ct_invalid = 0
    ct_leaked_identity = 0  # reported below but never incremented here
    ct_banned = 0
    battles = []
    for row in tqdm(data, desc="Cleaning"):
        if row["models"][0] is None or row["models"][1] is None:
            print(f"Invalid model names: {row['models']}")
            continue

        # Resolve model names: public names come from the UI widgets,
        # hidden names from the per-side conversation state.
        models_public = [remove_html(row["models"][0]), remove_html(row["models"][1])]
        if "model_name" in row["states"][0]:
            models_hidden = [
                row["states"][0]["model_name"],
                row["states"][1]["model_name"],
            ]
            if models_hidden[0] is None:
                models_hidden = models_public
        else:
            models_hidden = models_public

        # Exactly one empty public name means a half-revealed battle: invalid.
        if (models_public[0] == "" and models_public[1] != "") or (
            models_public[1] == "" and models_public[0] != ""
        ):
            ct_invalid += 1
            print(f"Invalid model names: {models_public}")
            continue

        if row["anony"]:
            anony = True
            models = models_hidden
            ct_anony += 1
        else:
            anony = False
            models = models_public
            # In named battles the revealed and hidden names must agree.
            if not models_public == models_hidden:
                print(f"Model names mismatch: {models_public} vs {models_hidden}")
                ct_invalid += 1
                continue

        def preprocess_model_name(m):
            # Normalize Playground display names to registry identifiers.
            if m == "Playground v2":
                return 'playground_PlayGroundV2_generation'
            if m == "Playground v2.5":
                return 'playground_PlayGroundV2.5_generation'
            return m
        models = [preprocess_model_name(m) for m in models]

        # valid = True
        # for _model in models:
        #     print(_model)
        #     input()
        #     try:
        #         platform, model_name, task = _model.split("_")
        #     except ValueError:
        #         print(f"Invalid model names: {_model}")
        #         valid = False
        #         break
        #     if not (platform.lower() in ["playground", "imagenhub", 'fal'] and (task == "generation" or task == "text2image")):
        #         valid = False
        #         break
        # if not valid:
        #     ct_invalid += 1
        #     print(f"Invalid model names: {models} for t2i_generation")
        #     continue
        # for i, _model in enumerate(models):
        #     platform, model_name, task = _model.split("_")
        #     models[i] = model_name

        models = [replace_model_name(m, row["tstamp"]) for m in models]

        # Exclude certain models
        if exclude_model_names and any(x in exclude_model_names for x in models):
            ct_invalid += 1
            continue

        if mode == "conv_release":
            # assert the two images are the same
            date = datetime.datetime.fromtimestamp(row["tstamp"], tz=timezone("US/Pacific")).strftime("%Y-%m-%d") # 2024-02-29
            image_path_format = f"{LOG_ROOT_DIR}/{date}-convinput_images/input_image_"
            image_path_0 = image_path_format + str(row["states"][0]["conv_id"]) + ".png"
            image_path_1 = image_path_format + str(row["states"][1]["conv_id"]) + ".png"
            if not os.path.exists(image_path_0) or not os.path.exists(image_path_1):
                print(f"Image not found for {image_path_0} or {image_path_1}")
                ct_invalid += 1
                continue

            image_0 = load_image(image_path_0)
            image_1 = load_image(image_path_1)
            if image_0 is None or image_1 is None:
                print(f"Image not found for {image_path_0} or {image_path_1}")
                ct_invalid += 1
                continue
            if image_0.tobytes() != image_1.tobytes():
                print(f"Image not the same for {image_path_0} and {image_path_1}")
                ct_invalid += 1
                continue

        # question_id = row["states"][0]["conv_id"]

        # Track vote counts per IP; optionally replace the raw IP with a
        # stable numeric pseudonym assigned in first-seen order.
        ip = row["ip"]
        if ip not in all_ips:
            all_ips[ip] = {"ip": ip, "count": 0, "sanitized_id": len(all_ips)}
        all_ips[ip]["count"] += 1
        if sanitize_ip:
            user_id = f"arena_user_{all_ips[ip]['sanitized_id']}"
        else:
            user_id = f"{all_ips[ip]['ip']}"

        if ban_ip_list is not None and ip in ban_ip_list:
            ct_banned += 1
            print(f"User {user_id} is banned")
            continue

        # Save the results
        battles.append(
            dict(
                model_a=models[0],
                model_b=models[1],
                winner=convert_type[row["type"]],
                judge=f"arena_user_{user_id}",
                anony=anony,
                tstamp=row["tstamp"],
            )
        )

        all_models.update(models_hidden)
    battles.sort(key=lambda x: x["tstamp"])
    last_updated_tstamp = battles[-1]["tstamp"]

    last_updated_datetime = datetime.datetime.fromtimestamp(
        last_updated_tstamp, tz=timezone("US/Pacific")
    ).strftime("%Y-%m-%d %H:%M:%S %Z")

    print(
        f"#votes: {len(data)}, #invalid votes: {ct_invalid}, "
        f"#leaked_identity: {ct_leaked_identity} "
        f"#banned: {ct_banned} "
    )
    print(f"#battles: {len(battles)}, #anony: {ct_anony}")
    print(f"#models: {len(all_models)}, {all_models}")
    print(f"last-updated: {last_updated_datetime}")

    if ban_ip_list is not None:
        for ban_ip in ban_ip_list:
            if ban_ip in all_ips:
                del all_ips[ban_ip]
    print("Top 30 IPs:")
    print(sorted(all_ips.values(), key=lambda x: x["count"], reverse=True)[:30])
    return battles
297
+
298
+
299
if __name__ == "__main__":
    # CLI: clean the vote logs and write clean_battle_<date>.json plus
    # cut_off_date.txt (consumed by the downstream elo analysis scripts).
    parser = argparse.ArgumentParser()
    parser.add_argument("--max-num-files", type=int)
    parser.add_argument(
        "--mode", type=str, choices=["simple", "conv_release"], default="simple"
    )
    parser.add_argument("--exclude-model-names", type=str, nargs="+")
    parser.add_argument("--ban-ip-file", type=str)
    parser.add_argument("--sanitize-ip", action="store_true", default=False)
    args = parser.parse_args()

    log_files = get_log_files(args.max_num_files)
    ban_ip_list = json.load(open(args.ban_ip_file)) if args.ban_ip_file else None

    battles = clean_battle_data(
        log_files, args.exclude_model_names or [], ban_ip_list, args.sanitize_ip, args.mode,
    )
    # The cutoff date is derived from the newest battle (US/Pacific).
    last_updated_tstamp = battles[-1]["tstamp"]
    cutoff_date = datetime.datetime.fromtimestamp(
        last_updated_tstamp, tz=timezone("US/Pacific")
    ).strftime("%Y%m%d")

    if args.mode == "simple":
        # Strip conversation payloads before publishing the simple dump.
        for x in battles:
            for key in [
                "conversation_a",
                "conversation_b",
                "question_id",
            ]:
                if key in x:
                    del x[key]
        print("Samples:")
        for i in range(min(4, len(battles))):
            print(battles[i])
        output = f"clean_battle_{cutoff_date}.json"
    elif args.mode == "conv_release":
        output = f"clean_battle_conv_{cutoff_date}.json"

    with open(output, "w") as fout:
        json.dump(battles, fout, indent=2, ensure_ascii=False)
    print(f"Write cleaned data to {output}")

    with open("cut_off_date.txt", "w") as fout:
        fout.write(cutoff_date)
arena_elo/elo_rating/elo_analysis.py ADDED
@@ -0,0 +1,395 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ from collections import defaultdict
3
+ import datetime
4
+ import json
5
+ import math
6
+ import pickle
7
+ from pytz import timezone
8
+
9
+ import numpy as np
10
+ import pandas as pd
11
+ import plotly.express as px
12
+ from tqdm import tqdm
13
+
14
+ from .model_registry import get_model_info
15
+ from .basic_stats import get_log_files
16
+ from .clean_battle_data import clean_battle_data
17
+
18
+ pd.options.display.float_format = "{:.2f}".format
19
+
20
+
21
def compute_elo(battles, K=4, SCALE=400, BASE=10, INIT_RATING=1000):
    """Run sequential (online) Elo over a battle DataFrame.

    Args:
        battles: DataFrame with columns model_a / model_b / winner,
            processed in row order; winner is one of "model_a",
            "model_b", "tie", "tie (bothbad)".
        K, SCALE, BASE, INIT_RATING: standard Elo parameters.

    Returns:
        Dict mapping model name -> final rating.

    Raises:
        Exception: on an unrecognized winner value.
    """
    rating = defaultdict(lambda: INIT_RATING)
    score_for_a = {"model_a": 1, "model_b": 0, "tie": 0.5, "tie (bothbad)": 0.5}

    for _idx, model_a, model_b, winner in battles[
        ["model_a", "model_b", "winner"]
    ].itertuples():
        ra, rb = rating[model_a], rating[model_b]
        # Expected scores from the logistic curve.
        expected_a = 1 / (1 + BASE ** ((rb - ra) / SCALE))
        expected_b = 1 / (1 + BASE ** ((ra - rb) / SCALE))
        if winner not in score_for_a:
            raise Exception(f"unexpected vote {winner}")
        sa = score_for_a[winner]
        rating[model_a] += K * (sa - expected_a)
        rating[model_b] += K * (1 - sa - expected_b)

    return dict(rating)
43
+
44
+
45
def get_bootstrap_result(battles, func_compute_elo, num_round=1000):
    """Run *num_round* bootstrap resamples of *battles* through the rating
    function and return a DataFrame (one row per round), columns ordered by
    descending median rating."""
    samples = []
    for round_idx in tqdm(range(num_round), desc="bootstrap"):
        resampled = battles.sample(frac=1.0, replace=True)
        print(f"Bootstrap round {round_idx}: {resampled.shape}")
        samples.append(func_compute_elo(resampled))
    result = pd.DataFrame(samples)
    ordering = result.median().sort_values(ascending=False).index
    return result[ordering]
53
+
54
+
55
def compute_elo_mle_with_tie(df, SCALE=400, BASE=10, INIT_RATING=1000):
    """Fit Bradley-Terry-style ratings via logistic regression over battles.

    Each battle contributes a design row with +log(BASE) in model_a's column
    and -log(BASE) in model_b's column; fitted coefficients are rescaled to
    the Elo range.  Ties are handled by duplicating every battle and
    labelling one copy as an A win and the other as a B win.

    Returns a pd.Series of ratings indexed by model name, best first.
    """
    # NOTE(review): debug prints left in place; they run on every call.
    print(f"Columns in df: {df.columns}")
    print(f"First few rows: {df.head()}")

    from sklearn.linear_model import LogisticRegression

    # Map every model name to a column index of the design matrix.
    models = pd.concat([df["model_a"], df["model_b"]]).unique()
    models = pd.Series(np.arange(len(models)), index=models)

    # duplicate battles
    df = pd.concat([df, df], ignore_index=True)
    p = len(models.index)
    n = df.shape[0]

    X = np.zeros([n, p])
    X[np.arange(n), models[df["model_a"]]] = +math.log(BASE)
    X[np.arange(n), models[df["model_b"]]] = -math.log(BASE)

    # one A win => two A win
    Y = np.zeros(n)
    Y[df["winner"] == "model_a"] = 1.0

    # one tie => one A win + one B win
    # find tie + tie (both bad) index
    tie_idx = (df["winner"] == "tie") | (df["winner"] == "tie (bothbad)")
    # Only the first copy of each tie is labelled an A win; the duplicate
    # row keeps Y=0 and therefore counts as a B win.
    tie_idx[len(tie_idx) // 2 :] = False
    Y[tie_idx] = 1.0

    lr = LogisticRegression(fit_intercept=False)
    lr.fit(X, Y)

    elo_scores = SCALE * lr.coef_[0] + INIT_RATING
    # calibrate llama-13b to 800 if applicable
    if "llama-13b" in models.index:
        elo_scores += 800 - elo_scores[models["llama-13b"]]
    return pd.Series(elo_scores, index=models.index).sort_values(ascending=False)
91
+
92
+
93
def get_median_elo_from_bootstrap(bootstrap_df):
    """Per-model median over bootstrap rounds, rounded to the nearest int."""
    midpoints = bootstrap_df.quantile(0.5)
    return {model: int(score + 0.5) for model, score in midpoints.items()}
97
+
98
+
99
def compute_pairwise_win_fraction(battles, model_order, limit_show_number=None):
    """Return a DataFrame where cell (A, B) is the fraction of A-vs-B battles
    that A won (callers pass tie-free battles).  If *model_order* is None,
    models are ordered by average win fraction; *limit_show_number* keeps
    only the top-N models of that ordering.
    """
    # Times each model wins as Model A
    a_win_ptbl = pd.pivot_table(
        battles[battles["winner"] == "model_a"],
        index="model_a",
        columns="model_b",
        aggfunc="size",
        fill_value=0,
    )

    # Table counting times each model wins as Model B
    b_win_ptbl = pd.pivot_table(
        battles[battles["winner"] == "model_b"],
        index="model_a",
        columns="model_b",
        aggfunc="size",
        fill_value=0,
    )

    # Table counting number of A-B pairs
    num_battles_ptbl = pd.pivot_table(
        battles, index="model_a", columns="model_b", aggfunc="size", fill_value=0
    )

    # Computing the proportion of wins for each model as A and as B
    # against all other models
    row_beats_col_freq = (a_win_ptbl + b_win_ptbl.T) / (
        num_battles_ptbl + num_battles_ptbl.T
    )

    if model_order is None:
        prop_wins = row_beats_col_freq.mean(axis=1).sort_values(ascending=False)
        model_order = list(prop_wins.keys())

    if limit_show_number is not None:
        model_order = model_order[:limit_show_number]

    # Arrange ordering according to proportion of wins
    row_beats_col = row_beats_col_freq.loc[model_order, model_order]
    return row_beats_col
139
+
140
+
141
def visualize_leaderboard_table(rating):
    """Render *rating* as a markdown leaderboard table, best model first."""
    medals = {1: "🥇", 2: "🥈", 3: "🥉"}
    ranked = sorted(rating, key=lambda model: -rating[model])

    lines = [
        "| Rank | Model | Elo Rating | Description |\n",
        "| --- | --- | --- | --- |\n",
    ]
    for rank, model in enumerate(ranked, start=1):
        minfo = get_model_info(model)
        emoji = medals.get(rank, "")
        lines.append(
            f"| {rank} | {emoji} [{model}]({minfo.link}) | {rating[model]:.0f} | {minfo.description} |\n"
        )

    return "".join(lines)
161
+
162
+
163
def visualize_pairwise_win_fraction(battles, model_order):
    """Heatmap of the fraction of A-vs-B battles won by A (row = A, col = B)."""
    row_beats_col = compute_pairwise_win_fraction(battles, model_order)
    fig = px.imshow(
        row_beats_col,
        color_continuous_scale="RdBu",
        text_auto=".2f",
        height=700,
        width=700,
    )
    fig.update_layout(
        xaxis_title="Model B",
        yaxis_title="Model A",
        xaxis_side="top",
        title_y=0.07,
        title_x=0.5,
    )
    fig.update_traces(
        hovertemplate="Model A: %{y}<br>Model B: %{x}<br>Fraction of A Wins: %{z}<extra></extra>"
    )

    return fig
184
+
185
+
186
def visualize_battle_count(battles, model_order):
    """Heatmap of total battle counts per (model A, model B) pair."""
    ptbl = pd.pivot_table(
        battles, index="model_a", columns="model_b", aggfunc="size", fill_value=0
    )
    # Symmetrize: a pair's total count is A-as-left plus A-as-right.
    battle_counts = ptbl + ptbl.T
    fig = px.imshow(
        battle_counts.loc[model_order, model_order],
        text_auto=True,
        height=700,
        width=700,
    )
    fig.update_layout(
        xaxis_title="Model B",
        yaxis_title="Model A",
        xaxis_side="top",
        title_y=0.07,
        title_x=0.5,
    )
    fig.update_traces(
        hovertemplate="Model A: %{y}<br>Model B: %{x}<br>Count: %{z}<extra></extra>"
    )
    return fig
208
+
209
+
210
def visualize_average_win_rate(battles, limit_show_number):
    """Bar chart of each model's mean win fraction against all opponents."""
    row_beats_col_freq = compute_pairwise_win_fraction(
        battles, None, limit_show_number=limit_show_number
    )
    fig = px.bar(
        row_beats_col_freq.mean(axis=1).sort_values(ascending=False),
        text_auto=".2f",
        height=500,
        width=700,
    )
    fig.update_layout(
        yaxis_title="Average Win Rate", xaxis_title="Model", showlegend=False
    )
    return fig
224
+
225
+
226
def visualize_bootstrap_elo_rating(df, df_final, limit_show_number):
    """Scatter of final ratings with 95% bootstrap confidence intervals.

    *df* holds one bootstrap round per row; *df_final* is the point estimate
    per model.  Only the top *limit_show_number* models are drawn.
    """
    bars = (
        pd.DataFrame(
            dict(
                lower=df.quantile(0.025),
                rating=df_final,
                upper=df.quantile(0.975),
            )
        )
        .reset_index(names="model")
        .sort_values("rating", ascending=False)
    )
    bars = bars[:limit_show_number]
    # Asymmetric error bars spanning the 2.5%-97.5% bootstrap quantiles.
    bars["error_y"] = bars["upper"] - bars["rating"]
    bars["error_y_minus"] = bars["rating"] - bars["lower"]
    bars["rating_rounded"] = np.round(bars["rating"], 2)
    fig = px.scatter(
        bars,
        x="model",
        y="rating",
        error_y="error_y",
        error_y_minus="error_y_minus",
        text="rating_rounded",
        height=500,
        width=700,
    )
    fig.update_layout(xaxis_title="Model", yaxis_title="Rating")
    return fig
254
+
255
+
256
def report_elo_analysis_results(battles_json, rating_system="bt", num_bootstrap=100, anony_only=True):
    """Compute ratings, plots, and leaderboard tables from cleaned battles.

    Args:
        battles_json: list of cleaned battle dicts (model_a, model_b, winner,
            anony, tstamp, ...), convertible to a DataFrame.
        rating_system: "bt" (Bradley-Terry MLE) or "elo" (online Elo with
            bootstrap median).
        num_bootstrap: number of bootstrap resampling rounds.
        anony_only: if True, keep only anonymous (side-blind) votes.

    Returns:
        dict with final/online ratings, plotly figures, a leaderboard
        DataFrame, and last-update timestamps.

    Raises:
        ValueError: on empty input, empty post-filter data, or an unknown
            *rating_system*.
    """
    battles = pd.DataFrame(battles_json)
    print(battles)
    print(f"Data before filtering: {battles.shape}")
    if battles.empty:
        raise ValueError("The battles dataframe is empty. Check the data loading or filtering steps.")

    battles = battles.sort_values(ascending=True, by=["tstamp"])
    # Only use anonymous votes
    if anony_only:
        battles = battles[battles["anony"]].reset_index(drop=True)
        if battles.empty:
            raise ValueError("No anonymous battles found. Check the filtering logic.")

    battles_no_ties = battles[~battles["winner"].str.contains("tie")]
    print(f"Data after filtering: {battles_no_ties.shape}")
    if battles_no_ties.empty:
        raise ValueError("No valid battles after removing ties. Check the filtering logic.")
    # Online update
    elo_rating_online = compute_elo(battles)

    if rating_system == "bt":
        bootstrap_df = get_bootstrap_result(
            battles, compute_elo_mle_with_tie, num_round=num_bootstrap
        )
        elo_rating_final = compute_elo_mle_with_tie(battles)
    elif rating_system == "elo":
        bootstrap_df = get_bootstrap_result(
            battles, compute_elo, num_round=num_bootstrap
        )
        elo_rating_median = get_median_elo_from_bootstrap(bootstrap_df)
        elo_rating_final = elo_rating_median
    else:
        # Previously an unknown value fell through and crashed later with a
        # NameError on `bootstrap_df`; fail fast with a clear message instead.
        raise ValueError(f"Unknown rating_system: {rating_system!r} (expected 'bt' or 'elo')")

    model_order = list(elo_rating_final.keys())
    model_order.sort(key=lambda k: -elo_rating_final[k])

    limit_show_number = 25  # limit show number to make plots smaller
    model_order = model_order[:limit_show_number]

    # leaderboard_table_df: elo rating, variance, 95% interval, number of battles
    leaderboard_table_df = pd.DataFrame(
        {
            "rating": elo_rating_final,
            "variance": bootstrap_df.var(),
            "rating_q975": bootstrap_df.quantile(0.975),
            "rating_q025": bootstrap_df.quantile(0.025),
            "num_battles": battles["model_a"].value_counts()
            + battles["model_b"].value_counts(),
        }
    )

    # Plots
    leaderboard_table = visualize_leaderboard_table(elo_rating_final)
    win_fraction_heatmap = visualize_pairwise_win_fraction(battles_no_ties, model_order)
    battle_count_heatmap = visualize_battle_count(battles_no_ties, model_order)
    average_win_rate_bar = visualize_average_win_rate(
        battles_no_ties, limit_show_number
    )
    bootstrap_elo_rating = visualize_bootstrap_elo_rating(
        bootstrap_df, elo_rating_final, limit_show_number
    )

    last_updated_tstamp = battles["tstamp"].max()
    last_updated_datetime = datetime.datetime.fromtimestamp(
        last_updated_tstamp, tz=timezone("US/Pacific")
    ).strftime("%Y-%m-%d %H:%M:%S %Z")

    return {
        "rating_system": rating_system,
        "elo_rating_online": elo_rating_online,
        "elo_rating_final": elo_rating_final,
        "leaderboard_table": leaderboard_table,
        "win_fraction_heatmap": win_fraction_heatmap,
        "battle_count_heatmap": battle_count_heatmap,
        "average_win_rate_bar": average_win_rate_bar,
        "bootstrap_elo_rating": bootstrap_elo_rating,
        "last_updated_datetime": last_updated_datetime,
        "last_updated_tstamp": last_updated_tstamp,
        "bootstrap_df": bootstrap_df,
        "leaderboard_table_df": leaderboard_table_df,
    }
337
+
338
+
339
def pretty_print_elo_rating(rating):
    """Print one "rank, model, rating" line per model, highest rating first."""
    ranked = sorted(rating, key=lambda name: -rating[name])
    for idx, name in enumerate(ranked):
        print(f"{idx+1:2d}, {name:25s}, {rating[name]:.0f}")
344
+
345
+
346
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--clean-battle-file", type=str)
    parser.add_argument("--max-num-files", type=int)
    parser.add_argument("--num-bootstrap", type=int, default=100)
    parser.add_argument(
        "--rating-system", type=str, choices=["bt", "elo"], default="bt"
    )
    # NOTE(review): --exclude-tie is parsed but never used below.
    parser.add_argument("--exclude-tie", action="store_true", default=False)
    args = parser.parse_args()

    # Fixed seed so bootstrap resampling is reproducible across runs.
    np.random.seed(42)

    if args.clean_battle_file:
        # Read data from a cleaned battle files
        battles = pd.read_json(args.clean_battle_file)
    else:
        # Read data from all log files
        log_files = get_log_files(args.max_num_files)
        battles = clean_battle_data(log_files)

    # Two analyses: anonymous-only votes vs all votes.
    anony_results = report_elo_analysis_results(
        battles, rating_system=args.rating_system, num_bootstrap=args.num_bootstrap, anony_only=True
    )
    full_results = report_elo_analysis_results(
        battles, rating_system=args.rating_system, num_bootstrap=args.num_bootstrap, anony_only=False
    )


    print("# Online Elo")
    pretty_print_elo_rating(anony_results["elo_rating_online"])
    print("# Median")
    pretty_print_elo_rating(anony_results["elo_rating_final"])
    print(f"Annoy last update : {anony_results['last_updated_datetime']}")
    print(f"Full last update : {full_results['last_updated_datetime']}")

    # The cutoff date (Pacific time) names the pickle and is recorded for
    # downstream scripts.
    last_updated_tstamp = full_results["last_updated_tstamp"]
    cutoff_date = datetime.datetime.fromtimestamp(
        last_updated_tstamp, tz=timezone("US/Pacific")
    ).strftime("%Y%m%d")


    results = {
        "anony": anony_results,
        "full": full_results,
    }
    with open("cut_off_date.txt", "w") as fout:
        fout.write(cutoff_date)
    with open(f"elo_results_{cutoff_date}.pkl", "wb") as fout:
        pickle.dump(results, fout)
arena_elo/elo_rating/filter_clean_battle_data.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import argparse
3
+ import pandas as pd
4
+
5
+
6
if __name__ == "__main__":
    # Filter a cleaned battle file down to battles whose two models both
    # appear in the model-info file, writing filtered_battle_<task>.json.
    parser = argparse.ArgumentParser()
    parser.add_argument("--task_name", type=str, default="image_editing", choices=["image_editing", "t2i_generation", "video_generation"])
    # Both files are needed to produce any output; previously omitting either
    # flag crashed later with a NameError, so make them required up front.
    parser.add_argument("--clean-battle-file", type=str, required=True)
    parser.add_argument("--model-info-file", type=str, required=True)

    args = parser.parse_args()

    print(args.clean_battle_file)
    with open(args.clean_battle_file, 'r') as f:
        battle_data = json.load(f)

    with open(args.model_info_file, 'r') as f:
        model_info = json.load(f)

    # Keep only battles where both participants are known models.
    valid_models = set(model_info.keys())
    filtered_battle_data = [
        entry for entry in battle_data
        if entry['model_a'] in valid_models and entry['model_b'] in valid_models
    ]

    with open(f"filtered_battle_{args.task_name}.json", 'w') as f:
        json.dump(filtered_battle_data, f, indent=2)

    print(f"Filtered data saved to filtered_battle_{args.task_name}.json")
arena_elo/elo_rating/generate_leaderboard.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ import fire
3
+ import json
4
+ import pandas as pd
5
+ import pickle
6
+
7
+
8
def main(
    model_info_file: str,
    elo_rating_pkl: str,
    output_csv: str
):
    """Merge model metadata (CSV) with pickled Elo results into a leaderboard CSV.

    Args:
        model_info_file: CSV with 8 columns per row:
            name, upload_date, description, parameter_count, creator,
            result_path, license, link.
        elo_rating_pkl: pickle produced by elo_analysis with "anony" and
            "full" result dicts, each holding a "leaderboard_table_df".
        output_csv: destination path; rows sorted by anonymous Elo rating.
    """
    model_info = {}
    with open(model_info_file, 'r', newline='', encoding='utf-8') as file:
        csv_reader = csv.reader(file)
        header = next(csv_reader)
        for row in csv_reader:
            if len(row) == 8: # Ensure that all columns are present in the row
                name, upload_date, description, parameter_count, creator, result_path, license, link = row
                model_info[name] = {
                    "upload_date": upload_date,
                    "description": description,
                    "parameter_count": parameter_count,
                    "creator": creator,
                    "result_path": result_path,
                    "license": license,
                    "link": link
                }

    with open(elo_rating_pkl, "rb") as fin:
        elo_rating_results = pickle.load(fin)

    anony_elo_rating_results = elo_rating_results["anony"]
    full_elo_rating_results = elo_rating_results["full"]
    anony_leaderboard_data = anony_elo_rating_results["leaderboard_table_df"]
    full_leaderboard_data = full_elo_rating_results["leaderboard_table_df"]

    fields = ["key", "Model", "Arena Elo rating (anony)", "Arena Elo rating (full)", "license", "creator", "link"]
    # set creator and license to empty for now
    all_models = anony_leaderboard_data.index.tolist()

    # Models with ratings but no metadata still get a leaderboard row,
    # with N/A placeholders.
    for model in all_models:
        if not model in model_info:
            model_info[model] = {}
            model_info[model]["license"] = "N/A"
            model_info[model]["creator"] = "N/A"
            model_info[model]["link"] = "N/A"
            print(f"Model {model} not found in model_info.json")
            #continue # skipped: entries without model_info would otherwise be hidden
        model_info[model]["Model"] = model
        model_info[model]["key"] = model

        if model in anony_leaderboard_data.index:
            model_info[model]["Arena Elo rating (anony)"] = anony_leaderboard_data.loc[model, "rating"]
        else:
            model_info[model]["Arena Elo rating (anony)"] = 0

        if model in full_elo_rating_results["leaderboard_table_df"].index:
            model_info[model]["Arena Elo rating (full)"] = full_leaderboard_data.loc[model, "rating"]
        else:
            model_info[model]["Arena Elo rating (full)"] = 0
        # if model in anony_leaderboard_data.index:
        #     model_info[model]["Arena Elo rating"] = anony_leaderboard_data.loc[model, "rating"]
        # else:
        #     model_info[model]["Arena Elo rating"] = 0

    # Drop metadata-only entries: a model without a "Model" field never
    # appeared in the ratings.
    final_model_info = {}
    for model in model_info:
        if "Model" in model_info[model]:
            final_model_info[model] = model_info[model]
    model_info = final_model_info

    exclude_keys = ['starting_from']
    for key in exclude_keys:
        for model in model_info:
            if key in model_info[model]:
                del model_info[model][key]
    df = pd.DataFrame(model_info).T
    df = df[fields]
    # sort by anony rating
    df = df.sort_values(by=["Arena Elo rating (anony)"], ascending=False)
    df.to_csv(output_csv, index=False)
    print("Leaderboard data saved to", output_csv)
    print(df)
85
+
86
+
87
+ if __name__ == "__main__":
88
+ fire.Fire(main)
arena_elo/elo_rating/inspect_conv_rating.py ADDED
@@ -0,0 +1,234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import code
3
+ import datetime
4
+ import json
5
+ import os
6
+ from pytz import timezone
7
+ import time
8
+
9
+ import pandas as pd
10
+ from tqdm import tqdm
11
+ import csv
12
+
13
+ import base64
14
+ from icecream import ic
15
+ from openai import OpenAI
16
+
17
+ # Function to encode the image
18
def encode_image(image_path):
    """Read the file at *image_path* and return it as a base64 ASCII string."""
    with open(image_path, "rb") as image_file:
        raw = image_file.read()
    return base64.b64encode(raw).decode('utf-8')
21
+
22
def get_log_files(max_num_files=None):
    """Collect the existing Feb/Mar 2024 conversation logs, keeping at most
    the last *max_num_files* of them (all of them when None)."""
    dates = [
        f"2024-{month:02d}-{day:02d}"
        for month in [2, 3]
        for day in range(1, 32)
    ]

    num_servers = 1
    found = []
    for date in dates:
        for _server in range(num_servers):
            # name = os.path.expanduser(f"~/fastchat_logs/server{i}/{d}-conv.json")
            path = os.path.expanduser(f"vision-arena-logs/{date}-conv.json")
            if os.path.exists(path):
                found.append(path)
    keep = max_num_files or len(found)
    return found[-keep:]
39
+
40
+
41
def pretty_print_conversation(messages):
    """Print each (role, message) pair on its own "[[role]]: msg" line."""
    for speaker, text in messages:
        print(f"[[{speaker}]]: {text}")
44
+
45
+
46
def get_gpt4v_response(client, img_bs64=None, text_prompt="", use_vision=False):
    """Send *text_prompt* (plus one base64-encoded JPEG when *use_vision* is
    True) to gpt-4-vision-preview via *client* and return the reply text.

    Responses are capped at 100 tokens.  Network/API errors propagate to the
    caller.
    """
    if use_vision:
        response = client.chat.completions.create(
            model="gpt-4-vision-preview",
            messages=[
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": text_prompt},
                        {
                            "type": "image_url",
                            "image_url": {
                                "url": f"data:image/jpeg;base64,{img_bs64}"
                            }
                        },
                    ],
                }
            ],
            max_tokens=100,
        )
    else:
        response = client.chat.completions.create(
            model="gpt-4-vision-preview",
            messages=[
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": text_prompt},
                    ],
                }
            ],
            max_tokens=100,
        )
    return response.choices[0].message.content
80
+
81
+ task_template_map = {
82
+ "image_caption": "Give me the semantic alignment score between the given image and the given caption: \"{generated_sentence}\" on a scale of 0-100. Only reply the score value.",
83
+ "vqa": "Rate the answer correctness regarding the question within the context of the given image on a scale of 0-100. Only reply the score value.",
84
+ "pair_rate_old": "[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"\n\n[System]\nGiven the instruction and the image, please compare the correctness of responses A and B. Reply with \"leftvote\" if you find A better, \"rightvote\" if B is better, \"bothbad_vote\" if both responses are wrong, and \"tievote\" if both responses are equally satisfactory. If you are unable to make a decision, please reply with \"NA\".",
85
+ "pair_rate_wexplanation": "[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"[System]\nPlease act as an impartial judge and evaluate the quality of the responses provided by two AI assistants to the user question displayed below. You should choose the assistant that follows the user’s instructions and answers the user’s question better. Your evaluation should consider factors such as the helpfulness, relevance, accuracy, depth, creativity, and level of detail of their responses. Begin your evaluation by comparing the two responses and provide a short explanation. Avoid any positional biases and ensure that the order in which the responses were presented does not influence your decision. Do not allow the length of the responses to influence your evaluation. Do not favor certain names of the assistants. Be as objective as possible. After providing your explanation, output your final verdict by strictly following this format: \"[[A]]\" if assistant A is better, \"[[B]]\" if assistant B is better, and \"[[C]]\" for a tie.",
86
+ "pair_rate": "[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"\n\n[System]\nPlease act as an impartial judge and evaluate the quality of the responses provided by two AI assistants to the user question displayed below. You should choose the assistant that follows the user’s instructions and answers the user’s question better. Your evaluation should consider factors such as the helpfulness, relevance, accuracy, depth, creativity, and level of detail of their responses. Begin your evaluation by comparing the two responses and provide a short explanation. Avoid any positional biases and ensure that the order in which the responses were presented does not influence your decision. Do not allow the length of the responses to influence your evaluation. Do not favor certain names of the assistants. Be as objective as possible. Reply with \"leftvote\" if you find assistant A better, \"rightvote\" if assistant B is better, \"bothbad_vote\" if both responses are wrong, and \"tievote\" if both assistants provide equally satisfactory answers. If you are unable to make a decision, please reply with \"NA\"."
87
+ }
88
+
89
def inspect_convs(log_files):
    """Replay pairwise votes from *log_files*, ask GPT-4V to judge each pair,
    and log every vote plus the model judgement to a CSV.

    For each human vote (left/right/tie/bothbad) the paired responses and the
    original input image are sent to gpt-4-vision-preview; the script tallies
    how often the model's verdict agrees with the human one.
    """
    ic(log_files)
    data = []
    total_vote = 0
    correct_vote = 0

    client = OpenAI()
    with open('all_pairvote_log_wgpt_prtchatbot.csv', 'w', newline='') as csvfile:
        # fieldnames = ['tstamp', 'type', 'model_1', 'model_2', 'template_name_1', 'template_name_2', 'system_message_1', 'system_message_2', 'role_1', 'role_2', 'instruction_1', 'instruction_2', 'message_1', 'message_2', 'offset_1', 'offset_2', 'conv_id_1', 'conv_id_2', 'model_name_1', 'model_name_2', 'ip']
        fieldnames = ['tstamp', 'type', 'models', 'states', 'ip', 'gpt_vote']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)

        # Write the header
        writer.writeheader()

        for filename in tqdm(log_files, desc="read files"):
            # Retry a few times in case the log is being written/synced.
            for retry in range(5):
                try:
                    lines = open(filename).readlines()
                    break
                except FileNotFoundError:
                    time.sleep(2)

            for l in lines:
                row = json.loads(l)

                if "states" not in row:
                    continue
                if row["type"] not in ["leftvote", "rightvote", "bothbad_vote", "tievote"]:
                    continue

                model_names = row["states"][0]["model_name"], row["states"][1]["model_name"]


                # Iterate through each state and write the relevant information
                if not len(row["states"][0]['messages']): continue
                # ic(row["states"][0]['messages'][1][1])

                # Skip rows where either side's reply is missing or errored.
                if row["states"][0]['messages'][1][1] is None or row["states"][1]['messages'][1][1] is None or "NETWORK ERROR" in row["states"][0]['messages'][1][1] or "NETWORK ERROR" in row["states"][1]['messages'][1][1]: continue
                total_vote += 1
                # row = {
                #     'tstamp': row['tstamp'],
                #     'type': row['type'],
                #     'model_1': row['models'][0],
                #     'model_2': row['models'][1],
                #     'template_name_1': row["states"][0]['template_name'],
                #     'system_message_1': row["states"][0]['system_message'],
                #     'template_name_2': row["states"][1]['template_name'],
                #     'system_message_2': row["states"][1]['system_message'],
                #     'role_1': row["states"][0]['roles'],
                #     'role_2': row["states"][1]['roles'],
                #     'instruction_1': row["states"][0]['messages'][0][1],
                #     'instruction_2': row["states"][1]['messages'][0][1],
                #     'message_1': row["states"][0]['messages'][1][1],
                #     'message_2': row["states"][1]['messages'][1][1],
                #     'offset_1': row["states"][0]['offset'],
                #     'offset_2': row["states"][1]['offset'],
                #     'conv_id_1': row["states"][0]['conv_id'],
                #     'conv_id_2': row["states"][1]['conv_id'],
                #     'model_name_1': row["states"][0]['model_name'],
                #     'model_name_2': row["states"][1]['model_name'],
                #     'ip': row['ip']
                # }
                # writer.writerow(row)
                # Convert complex objects to JSON strings
                # TODO: check two image are the same
                conv_id = row["states"][0]['conv_id']
                image_path = os.path.join("/local/home/yujielu/project/Arena-Elo/vision-arena-logs", os.path.basename(filename)[:-5]+"input_images", f"input_image_{conv_id}.png")
                if not os.path.exists(image_path):
                    response = "NA"
                    ic(image_path)
                else:
                    base64_image = encode_image(image_path)
                    left_response = row["states"][0]['messages'][1][1]
                    right_response = row["states"][1]['messages'][1][1]
                    sep = "-" * 20
                    instruction = row["states"][0]['messages'][0][1]
                    generated_sentence = f"[The Start of Assistant A’s Answer]\n{left_response}\n[The End of Assistant A’s Answer]\n\n[The Start of Assistant B’s Answer]\n{right_response}\n[The End of Assistant B’s Answer]"
                    text_prompt = task_template_map["pair_rate"].format(instruction=instruction, generated_sentence=generated_sentence)
                    # ic(text_prompt)
                    try:
                        response = get_gpt4v_response(client, img_bs64=base64_image, text_prompt=text_prompt, use_vision=True)
                    except:
                        ic(">>> skip")
                        response = "NA"

                # response = get_gpt4v_response(client, img_bs64=base64_image, text_prompt=text_prompt, use_vision=True)
                ic(row['type'], response)
                # Anything that is not one of the vote labels counts as "NA".
                if response.strip() not in ["leftvote", "rightvote", "bothbad_vote", "tievote"]:
                    response = "NA"
                # ic(generated_sentence)

                # if row['type'] == "leftvote":
                #     row['type'] = "A"
                # elif row['type'] == "rightvote":
                #     row['type'] = "B"
                # elif row['type'] in ["bothbad_vote", "tievote"]:
                #     row['type'] = "C"
                if row['type'] == response.strip():
                    correct_vote += 1
                row['models'] = json.dumps(row['models'])
                row['states'] = json.dumps(row['states'], ensure_ascii=False)
                row['gpt_vote'] = response

                # Write the modified row to the CSV file
                writer.writerow(row)
                # if row["type"] == "leftvote":
                #     winner, loser = model_names[0], model_names[1]
                #     winner_conv, loser_conv = row["states"][0], row["states"][1]
                # elif row["type"] == "rightvote":
                #     loser, winner = model_names[0], model_names[1]
                #     loser_conv, winner_conv = row["states"][0], row["states"][1]

                # if loser == "llava-v1.5-13b" and winner == "llava-v1.5-13b":
                #     print("=" * 20)
                #     print(f"Winner: {winner}")
                #     pretty_print_conversation(winner_conv["messages"])
                #     print(f"Loser: {loser}")
                #     pretty_print_conversation(loser_conv["messages"])
                #     print("=" * 20)
                #     input()
                # if row['type'] == 'bothbad_vote':
                #     from icecream import ic
                #     ic(model_names)
                # if row["type"] == "bothbad_vote" and "gpt-4-vision-preview" in model_names:
                #     print("=" * 20)
                #     print(f"Model A: {model_names[0]}")
                #     pretty_print_conversation(row["states"][0]["messages"])
                #     print(f"Model B: {model_names[1]}")
                #     pretty_print_conversation(row["states"][1]["messages"])
                #     print("=" * 20)
                #     input()
                # if correct_vote >= 300: break
    ic(total_vote, correct_vote)
223
+
224
+
225
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--max-num-files", type=int)
    args = parser.parse_args()

    # Gather the available conversation logs (optionally capped) and run the
    # GPT-4V judging pass over them.
    log_files = get_log_files(args.max_num_files)



    inspect_convs(log_files)
arena_elo/elo_rating/inspect_cost.py ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import fire
2
+ import time
3
+ import json
4
+ from collections import defaultdict
5
+ from .basic_stats import get_log_files, NUM_SERVERS, LOG_ROOT_DIR
6
+ from .utils import detect_language, get_time_stamp_from_date, get_input_image_path, load_image_from_path
7
+ from tqdm import tqdm
8
+ VOTES = ["tievote", "leftvote", "rightvote", "bothbad_vote", "chat"]
9
+
10
+
11
def remove_html(raw):
    """Strip the HTML/markdown wrappers the arena UI puts around model names."""
    h3_suffix = "</h3>\n"
    if raw.startswith("<h3>"):
        # "<h3>Model X: name</h3>\n" -> "name"
        return raw[raw.find(": ") + 2 : -len(h3_suffix)]
    if raw.startswith(("### Model A: ", "### Model B: ")):
        return raw[13:]
    return raw
17
+
18
+
19
def read_file(filename):
    """Parse one JSONL log file, keeping only rows whose type is in VOTES.

    Retries up to five times (2s apart) if the file is not yet visible on
    the filesystem; returns whatever rows were collected.
    """
    rows = []
    for _attempt in range(5):
        try:
            for line in open(filename):
                record = json.loads(line)
                if record["type"] in VOTES:
                    rows.append(record)
            break
        except FileNotFoundError:
            time.sleep(2)
    return rows
32
+
33
+
34
def read_file_parallel(log_files, num_threads=16):
    """Parse all *log_files* with a process pool and concatenate the rows."""
    from multiprocessing import Pool

    combined = []
    with Pool(num_threads) as pool:
        per_file = list(tqdm(pool.imap(read_file, log_files), total=len(log_files)))
        for chunk in per_file:
            combined.extend(chunk)
    return combined
43
+
44
def num_tokens(s: str):
    """Rough token estimate (~4 characters per token); None counts as zero."""
    return 0 if s is None else len(s) / 4
48
+
49
def main(
):
    """Aggregate usage stats (request counts, token counts, image sizes) from
    the vote/chat logs and print a GPT-4V cost estimate.

    Token counts use the 4-chars-per-token heuristic from ``num_tokens``;
    image token pricing follows the tiled 512px formula.
    """
    log_files = get_log_files()
    data = read_file_parallel(log_files)

    all_model_counts = defaultdict(int)
    all_model_input_tokens_counts = defaultdict(list)
    all_model_output_tokens_counts = defaultdict(list)
    all_model_image_sizes = defaultdict(list)
    chat_battle_counts = defaultdict(int)
    for row in tqdm(data, desc="counting"):
        if row['type'] == "chat":
            # Direct-chat rows carry a single model and a single state.
            chat_battle_counts["chat"] += 1
            all_model_counts[row['model']] += 1
            tstamp = row["tstamp"]
            conv_id = row["state"]["conv_id"]

            image = load_image_from_path(get_input_image_path(tstamp, conv_id))
            if image is None:
                image_size = None
            else:
                image_size = load_image_from_path(get_input_image_path(tstamp, conv_id)).size
            all_model_image_sizes[row['model']].append(image_size)
            try:
                # Messages alternate user/assistant starting at `offset`:
                # even slots are inputs, odd slots are model outputs.
                for message in row["state"]["messages"][row["state"]["offset"] :: 2]:
                    all_model_input_tokens_counts[row['model']].append(num_tokens(message[1]))
                for message in row["state"]["messages"][row["state"]["offset"] + 1 :: 2]:
                    all_model_output_tokens_counts[row['model']].append(num_tokens(message[1]))
            except Exception as e:
                print(row)
                raise e

        else:
            # Battle rows (left/right/tie/bothbad votes) carry two states.
            chat_battle_counts[row['type']] += 1
            if row["models"][0] is None or row["models"][1] is None:
                continue

            # Resolve model names
            models_public = [remove_html(row["models"][0]), remove_html(row["models"][1])]
            if "model_name" in row["states"][0]:
                models_hidden = [
                    row["states"][0]["model_name"],
                    row["states"][1]["model_name"],
                ]
                if models_hidden[0] is None:
                    models_hidden = models_public
            else:
                models_hidden = models_public

            if (models_public[0] == "" and models_public[1] != "") or (
                models_public[1] == "" and models_public[0] != ""
            ):
                continue

            if models_public[0] == "" or models_public[0] == "Model A":
                anony = True
                models = models_hidden
            else:
                anony = False
                models = models_public
                if not models_public == models_hidden:
                    continue

            all_model_counts[models[0]] += 1
            all_model_counts[models[1]] += 1
            tstamp = row["tstamp"]
            conv_id1 = row["states"][0]["conv_id"]
            conv_id2 = row["states"][1]["conv_id"]

            image1 = load_image_from_path(get_input_image_path(tstamp, conv_id1))
            image2 = load_image_from_path(get_input_image_path(tstamp, conv_id2))
            all_model_image_sizes[models[0]].append(None if image1 is None else image1.size)
            all_model_image_sizes[models[1]].append(None if image2 is None else image2.size)

            for message in row["states"][0]["messages"][row["states"][0]["offset"] :: 2]:
                all_model_input_tokens_counts[models[0]].append(num_tokens(message[1]))
            for message in row["states"][0]["messages"][row["states"][0]["offset"] + 1 :: 2]:
                all_model_output_tokens_counts[models[0]].append(num_tokens(message[1]))
            for message in row["states"][1]["messages"][row["states"][1]["offset"] :: 2]:
                all_model_input_tokens_counts[models[1]].append(num_tokens(message[1]))
            for message in row["states"][1]["messages"][row["states"][1]["offset"] + 1 :: 2]:
                all_model_output_tokens_counts[models[1]].append(num_tokens(message[1]))

    print("### Chat battle counts (requests)")
    print(json.dumps(chat_battle_counts, indent=4))

    print("### Model counts (requests)")
    print(json.dumps(all_model_counts, indent=4))

    print("### Model Avg input tokens counts (tokens)")
    average_input_tokens_counts = {}
    for model, counts in all_model_input_tokens_counts.items():
        average_input_tokens_counts[model] = sum(counts) / len(counts)
    print(json.dumps(average_input_tokens_counts, indent=4))

    print("### Model AVg output tokens counts (tokens)")
    average_output_tokens_counts = {}
    for model, counts in all_model_output_tokens_counts.items():
        average_output_tokens_counts[model] = sum(counts) / len(counts)
    print(json.dumps(average_output_tokens_counts, indent=4))

    print("### Model Avg image sizes (height, width)")
    average_image_sizes = {}
    for model, sizes in all_model_image_sizes.items():
        # NOTE(review): averages divide by len(sizes) including None entries,
        # so conversations without an image drag the average down — confirm
        # this is intended.
        avg_height = sum([size[0] for size in sizes if size is not None]) / len(sizes)
        avg_width = sum([size[1] for size in sizes if size is not None]) / len(sizes)
        average_image_sizes[model] = (avg_height, avg_width)
    print(json.dumps(average_image_sizes, indent=4))

    print("### GPT-4V estimated cost (USD)")
    gpt_4v_name = "gpt-4-vision-preview"
    gpt_4v_cost = {}
    gpt_4v_cost['input'] = sum(all_model_input_tokens_counts[gpt_4v_name]) / 1000 * 0.01
    gpt_4v_cost['output'] = sum(all_model_output_tokens_counts[gpt_4v_name]) / 1000 * 0.03

    all_image_cost = 0
    for size in all_model_image_sizes[gpt_4v_name]:
        if size is None:
            continue
        # Tiled image pricing: 170 tokens per 512x512 tile plus 85 base tokens.
        all_image_tokens = (size[0] // 512 + 1) * (size[1] // 512 + 1) * 170 + 85
        all_image_cost += all_image_tokens / 1000 * 0.01
    gpt_4v_cost['image'] = all_image_cost
    print(json.dumps(gpt_4v_cost, indent=4))
172
+
173
+
174
+
175
+
176
+ if __name__ == "__main__":
177
+ fire.Fire(main)
arena_elo/elo_rating/inspect_elo_rating_pkl.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ import plotly.graph_objects as go
3
+
4
def output_figure(data, figure_name="battle_count_heatmap", label="anony"):
    """Resize/retitle a plotly figure from the elo-results dict and save it as a PNG.

    Args:
        data: dict loaded from elo_results.pkl; keyed by battle mode
            ("anony" / "full"), each holding named plotly figures.
        figure_name: key of the figure inside data[label]; also used as the
            output PNG filename ("<figure_name>.png").
        label: battle mode to read from. The previous default, "annoy", was a
            typo — the pickle uses the key "anony" — and always raised KeyError.
    """
    fig = data[label][figure_name]
    fig.update_layout(
        height=700,
        width=700,
        title={'text': f'{figure_name}', 'x': 0.5, 'y': 0.07},
        xaxis_title="Model B",
        yaxis_title="Model A",
        # coloraxis_colorscale=[[0.0, '#0d0887'], [1.0, '#f0f921']],
        margin={'t': 60}
    )
    fig.write_image(f"{figure_name}.png")
16
+
17
# Inspect the latest elo_results.pkl: export the anony-mode heatmaps as PNGs
# and print both the anonymous and full leaderboards sorted by rating.
with open("./results/latest/elo_results.pkl", 'rb') as f:
    data = pickle.load(f)

print()
print(data["anony"].keys())

# Save each anony-mode heatmap to <figure_name>.png.
for heatmap_name in ('win_fraction_heatmap', 'battle_count_heatmap'):
    output_figure(data, heatmap_name, "anony")

# Print leaderboards, highest rating first.
for mode in ("anony", "full"):
    board = data[mode]["leaderboard_table_df"]
    print(board.sort_values(by=["rating"], ascending=False))
print('done')
arena_elo/elo_rating/model_registry.py ADDED
@@ -0,0 +1,578 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Additional information of the models."""
from collections import namedtuple, OrderedDict
from typing import List


# Lightweight record describing one model family shown in the UI.
ModelInfo = namedtuple("ModelInfo", ["simple_name", "link", "description"])


# Maps every known full model name (alias) to its ModelInfo record.
model_info = OrderedDict()


def register_model_info(
    full_names: List[str], simple_name: str, link: str, description: str
):
    """Register the same ModelInfo record under each of the given full names."""
    record = ModelInfo(simple_name, link, description)
    for alias in full_names:
        model_info[alias] = record


def get_model_info(name: str) -> ModelInfo:
    """Look up a model's info, falling back to a stub entry for unknown names."""
    found = model_info.get(name)
    if found is not None:
        return found
    # To fix this, please use `register_model_info` to register your model
    return ModelInfo(
        name, "", "Register the description at arena.model/model_registry.py"
    )
29
+
30
+
31
+ register_model_info(
32
+ [
33
+ "IEITYuan/Yuan2-2B-Janus-hf",
34
+ "IEITYuan/Yuan2-2B-hf",
35
+ "IEITYuan/Yuan2-51B-hf",
36
+ "IEITYuan/Yuan2-102B-hf",
37
+ ],
38
+ "IEIT-Yuan2",
39
+ "https://github.com/IEIT-Yuan/Yuan-2.0",
40
+ "Yuan2.0 is a new generation Fundamental Large Language Model developed by IEIT System.",
41
+ )
42
+
43
+ register_model_info(
44
+ ["mixtral-8x7b-instruct-v0.1", "mistral-7b-instruct"],
45
+ "Mixtral of experts",
46
+ "https://mistral.ai/news/mixtral-of-experts/",
47
+ "A Mixture-of-Experts model by Mistral AI",
48
+ )
49
+
50
+ register_model_info(
51
+ ["gemini-pro"],
52
+ "Gemini",
53
+ "https://blog.google/technology/ai/google-gemini-pro-imagen-duet-ai-update/",
54
+ "Gemini by Google",
55
+ )
56
+
57
+ register_model_info(
58
+ ["gemini-pro-vision"],
59
+ "Gemini",
60
+ "https://blog.google/technology/ai/google-gemini-pro-imagen-duet-ai-update/",
61
+ "Gemini by Google",
62
+ )
63
+
64
+ register_model_info(
65
+ ["solar-10.7b-instruct-v1.0"],
66
+ "SOLAR-10.7B-Instruct",
67
+ "https://huggingface.co/upstage/SOLAR-10.7B-Instruct-v1.0",
68
+ "A model trained using depth up-scaling by Upstage AI",
69
+ )
70
+
71
+ register_model_info(
72
+ ["gpt-4-turbo"],
73
+ "GPT-4-Turbo",
74
+ "https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo",
75
+ "GPT-4-Turbo by OpenAI",
76
+ )
77
+
78
+ register_model_info(
79
+ ["gpt-4-vision-preview"],
80
+ "gpt-4-vision-preview",
81
+ "https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo",
82
+ "GPT-4(V) by OpenAI",
83
+ )
84
+
85
+ register_model_info(
86
+ ["gpt-3.5-turbo", "gpt-3.5-turbo-0314", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106"],
87
+ "GPT-3.5",
88
+ "https://platform.openai.com/docs/models/gpt-3-5",
89
+ "GPT-3.5-Turbo by OpenAI",
90
+ )
91
+
92
+ register_model_info(
93
+ ["gpt-4", "gpt-4-0314", "gpt-4-0613"],
94
+ "GPT-4",
95
+ "https://openai.com/research/gpt-4",
96
+ "GPT-4 by OpenAI",
97
+ )
98
+
99
+ register_model_info(
100
+ ["claude-2.1", "claude-2.0"],
101
+ "Claude",
102
+ "https://www.anthropic.com/index/claude-2",
103
+ "Claude 2 by Anthropic",
104
+ )
105
+
106
+ register_model_info(
107
+ ["claude-1"],
108
+ "Claude",
109
+ "https://www.anthropic.com/index/introducing-claude",
110
+ "Claude 1 by Anthropic",
111
+ )
112
+
113
+ register_model_info(
114
+ ["claude-instant-1", "claude-instant-1.2"],
115
+ "Claude Instant",
116
+ "https://www.anthropic.com/index/introducing-claude",
117
+ "Claude Instant by Anthropic",
118
+ )
119
+
120
+ register_model_info(
121
+ ["pplx-70b-online", "pplx-7b-online"],
122
+ "pplx-online-llms",
123
+ "https://blog.perplexity.ai/blog/introducing-pplx-online-llms",
124
+ "Online LLM API by Perplexity AI",
125
+ )
126
+
127
+ register_model_info(
128
+ ["openhermes-2.5-mistral-7b"],
129
+ "OpenHermes-2.5-Mistral-7B",
130
+ "https://huggingface.co/teknium/OpenHermes-2.5-Mistral-7B",
131
+ "a mistral-based model fine-tuned on 1M GPT-4 outputs",
132
+ )
133
+
134
+ register_model_info(
135
+ ["starling-lm-7b-alpha"],
136
+ "Starling-LM-7B-alpha",
137
+ "https://huggingface.co/berkeley-nest/Starling-LM-7B-alpha",
138
+ "an open model trained using RLAIF by Berkeley",
139
+ )
140
+
141
+ register_model_info(
142
+ ["tulu-2-dpo-70b"],
143
+ "Tulu 2",
144
+ "https://huggingface.co/allenai/tulu-2-dpo-70b",
145
+ "an instruction and RLHF model by UW/AllenAI",
146
+ )
147
+
148
+ register_model_info(
149
+ ["yi-34b-chat", "yi-6b-chat"],
150
+ "Yi-Chat",
151
+ "https://huggingface.co/01-ai/Yi-34B-Chat",
152
+ "A large language model by 01 AI",
153
+ )
154
+
155
+ register_model_info(
156
+ ["llama-2-70b-chat", "llama-2-34b-chat", "llama-2-13b-chat", "llama-2-7b-chat"],
157
+ "Llama 2",
158
+ "https://ai.meta.com/llama/",
159
+ "open foundation and fine-tuned chat models by Meta",
160
+ )
161
+
162
+ register_model_info(
163
+ [
164
+ "vicuna-33b",
165
+ "vicuna-33b-v1.3",
166
+ "vicuna-13b",
167
+ "vicuna-13b-v1.3",
168
+ "vicuna-7b",
169
+ "vicuna-7b-v1.3",
170
+ ],
171
+ "Vicuna",
172
+ "https://lmsys.org/blog/2023-03-30-vicuna/",
173
+ "a chat assistant fine-tuned on user-shared conversations by LMSYS",
174
+ )
175
+
176
+ register_model_info(
177
+ ["chatglm3-6b", "chatglm2-6b", "chatglm-6b"],
178
+ "ChatGLM",
179
+ "https://chatglm.cn/blog",
180
+ "an open bilingual dialogue language model by Tsinghua University",
181
+ )
182
+
183
+ register_model_info(
184
+ ["openchat-3.5"],
185
+ "OpenChat 3.5",
186
+ "https://github.com/imoneoi/openchat",
187
+ "an open model fine-tuned on Mistral-7B using C-RLFT",
188
+ )
189
+
190
+ register_model_info(
191
+ ["tenyxchat-7b-v1"],
192
+ "TenyxChat-7B",
193
+ "https://huggingface.co/tenyx/TenyxChat-7B-v1",
194
+ "an open model DPO trained on top of OpenChat-3.5 using Tenyx fine-tuning",
195
+ )
196
+
197
+ register_model_info(
198
+ ["zephyr-7b-beta", "zephyr-7b-alpha"],
199
+ "Zephyr",
200
+ "https://huggingface.co/HuggingFaceH4/zephyr-7b-alpha",
201
+ "a chatbot fine-tuned from Mistral by Hugging Face",
202
+ )
203
+
204
+ register_model_info(
205
+ ["notus-7b-v1"],
206
+ "Notus",
207
+ "https://huggingface.co/argilla/notus-7b-v1",
208
+ "a chatbot fine-tuned from Zephyr SFT by Argilla",
209
+ )
210
+
211
+ register_model_info(
212
+ ["catppt"],
213
+ "CatPPT",
214
+ "https://huggingface.co/rishiraj/CatPPT",
215
+ "a chatbot fine-tuned from a SLERP merged model by Rishiraj Acharya",
216
+ )
217
+
218
+ register_model_info(
219
+ ["TinyLlama"],
220
+ "TinyLlama",
221
+ "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0",
222
+ "The TinyLlama project is an open endeavor to pretrain a 1.1B Llama model on 3 trillion tokens.",
223
+ )
224
+
225
+ register_model_info(
226
+ ["qwen-14b-chat"],
227
+ "Qwen",
228
+ "https://huggingface.co/Qwen/Qwen-14B-Chat",
229
+ "a large language model by Alibaba Cloud",
230
+ )
231
+
232
+ register_model_info(
233
+ ["codellama-34b-instruct", "codellama-13b-instruct", "codellama-7b-instruct"],
234
+ "Code Llama",
235
+ "https://ai.meta.com/blog/code-llama-large-language-model-coding/",
236
+ "open foundation models for code by Meta",
237
+ )
238
+
239
+ register_model_info(
240
+ ["wizardlm-70b", "wizardlm-30b", "wizardlm-13b"],
241
+ "WizardLM",
242
+ "https://github.com/nlpxucan/WizardLM",
243
+ "an instruction-following LLM using evol-instruct by Microsoft",
244
+ )
245
+
246
+ register_model_info(
247
+ ["wizardcoder-15b-v1.0"],
248
+ "WizardLM",
249
+ "https://github.com/nlpxucan/WizardLM/tree/main/WizardCoder",
250
+ "Empowering Code Large Language Models with Evol-Instruct",
251
+ )
252
+
253
+ register_model_info(
254
+ ["mpt-7b-chat", "mpt-30b-chat"],
255
+ "MPT-Chat",
256
+ "https://www.mosaicml.com/blog/mpt-30b",
257
+ "a chatbot fine-tuned from MPT by MosaicML",
258
+ )
259
+
260
+ register_model_info(
261
+ ["guanaco-33b", "guanaco-65b"],
262
+ "Guanaco",
263
+ "https://github.com/artidoro/qlora",
264
+ "a model fine-tuned with QLoRA by UW",
265
+ )
266
+
267
+ register_model_info(
268
+ ["gpt4all-13b-snoozy"],
269
+ "GPT4All-Snoozy",
270
+ "https://github.com/nomic-ai/gpt4all",
271
+ "a finetuned LLaMA model on assistant style data by Nomic AI",
272
+ )
273
+
274
+ register_model_info(
275
+ ["koala-13b"],
276
+ "Koala",
277
+ "https://bair.berkeley.edu/blog/2023/04/03/koala",
278
+ "a dialogue model for academic research by BAIR",
279
+ )
280
+
281
+ register_model_info(
282
+ ["RWKV-4-Raven-14B"],
283
+ "RWKV-4-Raven",
284
+ "https://huggingface.co/BlinkDL/rwkv-4-raven",
285
+ "an RNN with transformer-level LLM performance",
286
+ )
287
+
288
+ register_model_info(
289
+ ["alpaca-13b"],
290
+ "Alpaca",
291
+ "https://crfm.stanford.edu/2023/03/13/alpaca.html",
292
+ "a model fine-tuned from LLaMA on instruction-following demonstrations by Stanford",
293
+ )
294
+
295
+ register_model_info(
296
+ ["oasst-pythia-12b"],
297
+ "OpenAssistant (oasst)",
298
+ "https://open-assistant.io",
299
+ "an Open Assistant for everyone by LAION",
300
+ )
301
+
302
+ register_model_info(
303
+ ["oasst-sft-7-llama-30b"],
304
+ "OpenAssistant (oasst)",
305
+ "https://open-assistant.io",
306
+ "an Open Assistant for everyone by LAION",
307
+ )
308
+
309
+ register_model_info(
310
+ ["palm-2"],
311
+ "PaLM 2 Chat",
312
+ "https://cloud.google.com/vertex-ai/docs/release-notes#May_10_2023",
313
+ "PaLM 2 for Chat (chat-bison@001) by Google",
314
+ )
315
+
316
+ register_model_info(
317
+ ["llama-7b", "llama-13b"],
318
+ "LLaMA",
319
+ "https://arxiv.org/abs/2302.13971",
320
+ "open and efficient foundation language models by Meta",
321
+ )
322
+
323
+ register_model_info(
324
+ ["open-llama-7b-v2-open-instruct", "open-llama-7b-open-instruct"],
325
+ "Open LLaMa (Open Instruct)",
326
+ "https://medium.com/vmware-data-ml-blog/starter-llm-for-the-enterprise-instruction-tuning-openllama-7b-d05fc3bbaccc",
327
+ "Open LLaMa fine-tuned on instruction-following data by VMware",
328
+ )
329
+
330
+ register_model_info(
331
+ ["dolly-v2-12b"],
332
+ "Dolly",
333
+ "https://www.databricks.com/blog/2023/04/12/dolly-first-open-commercially-viable-instruction-tuned-llm",
334
+ "an instruction-tuned open large language model by Databricks",
335
+ )
336
+
337
+ register_model_info(
338
+ ["stablelm-tuned-alpha-7b"],
339
+ "StableLM",
340
+ "https://github.com/stability-AI/stableLM",
341
+ "Stability AI language models",
342
+ )
343
+
344
+ register_model_info(
345
+ ["codet5p-6b"],
346
+ "CodeT5p-6b",
347
+ "https://huggingface.co/Salesforce/codet5p-6b",
348
+ "Code completion model released by Salesforce",
349
+ )
350
+
351
+ register_model_info(
352
+ ["fastchat-t5-3b", "fastchat-t5-3b-v1.0"],
353
+ "FastChat-T5",
354
+ "https://huggingface.co/lmsys/fastchat-t5-3b-v1.0",
355
+ "a chat assistant fine-tuned from FLAN-T5 by LMSYS",
356
+ )
357
+
358
+ register_model_info(
359
+ ["phoenix-inst-chat-7b"],
360
+ "Phoenix-7B",
361
+ "https://huggingface.co/FreedomIntelligence/phoenix-inst-chat-7b",
362
+ "a multilingual chat assistant fine-tuned from Bloomz to democratize ChatGPT across languages by CUHK(SZ)",
363
+ )
364
+
365
+ register_model_info(
366
+ ["realm-7b-v1"],
367
+ "ReaLM",
368
+ "https://github.com/FreedomIntelligence/ReaLM",
369
+ "A chatbot fine-tuned from LLaMA2 with data generated via iterative calls to UserGPT and ChatGPT by CUHK(SZ) and SRIBD.",
370
+ )
371
+
372
+ register_model_info(
373
+ ["billa-7b-sft"],
374
+ "BiLLa-7B-SFT",
375
+ "https://huggingface.co/Neutralzz/BiLLa-7B-SFT",
376
+ "an instruction-tuned bilingual LLaMA with enhanced reasoning ability by an independent researcher",
377
+ )
378
+
379
+ register_model_info(
380
+ ["h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt-v2"],
381
+ "h2oGPT-GM-7b",
382
+ "https://huggingface.co/h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt-v2",
383
+ "an instruction-tuned OpenLLaMA with enhanced conversational ability by H2O.ai",
384
+ )
385
+
386
+ register_model_info(
387
+ ["baize-v2-7b", "baize-v2-13b"],
388
+ "Baize v2",
389
+ "https://github.com/project-baize/baize-chatbot#v2",
390
+ "A chatbot fine-tuned from LLaMA with ChatGPT self-chat data and Self-Disillation with Feedback (SDF) by UCSD and SYSU.",
391
+ )
392
+
393
+ register_model_info(
394
+ [
395
+ "airoboros-l2-7b-2.1",
396
+ "airoboros-l2-13b-2.1",
397
+ "airoboros-c34b-2.1",
398
+ "airoboros-l2-70b-2.1",
399
+ ],
400
+ "airoboros",
401
+ "https://huggingface.co/jondurbin/airoboros-l2-70b-2.1",
402
+ "an instruction-tuned LlaMa model tuned with 100% synthetic instruction-response pairs from GPT4",
403
+ )
404
+
405
+ register_model_info(
406
+ [
407
+ "spicyboros-7b-2.2",
408
+ "spicyboros-13b-2.2",
409
+ "spicyboros-70b-2.2",
410
+ ],
411
+ "spicyboros",
412
+ "https://huggingface.co/jondurbin/spicyboros-70b-2.2",
413
+ "de-aligned versions of the airoboros models",
414
+ )
415
+
416
+ register_model_info(
417
+ ["Robin-7b-v2", "Robin-13b-v2", "Robin-33b-v2"],
418
+ "Robin-v2",
419
+ "https://huggingface.co/OptimalScale/robin-7b-v2-delta",
420
+ "A chatbot fine-tuned from LLaMA-7b, achieving competitive performance on chitchat, commonsense reasoning and instruction-following tasks, by OptimalScale, HKUST.",
421
+ )
422
+
423
+ register_model_info(
424
+ ["manticore-13b-chat"],
425
+ "Manticore 13B Chat",
426
+ "https://huggingface.co/openaccess-ai-collective/manticore-13b-chat-pyg",
427
+ "A chatbot fine-tuned from LlaMa across several CoT and chat datasets.",
428
+ )
429
+
430
+ register_model_info(
431
+ ["redpajama-incite-7b-chat"],
432
+ "RedPajama-INCITE-7B-Chat",
433
+ "https://huggingface.co/togethercomputer/RedPajama-INCITE-7B-Chat",
434
+ "A chatbot fine-tuned from RedPajama-INCITE-7B-Base by Together",
435
+ )
436
+
437
+ register_model_info(
438
+ [
439
+ "falcon-7b",
440
+ "falcon-7b-instruct",
441
+ "falcon-40b",
442
+ "falcon-40b-instruct",
443
+ "falcon-180b",
444
+ "falcon-180b-chat",
445
+ ],
446
+ "Falcon",
447
+ "https://huggingface.co/tiiuae/falcon-180B",
448
+ "TII's flagship series of large language models",
449
+ )
450
+
451
+ register_model_info(
452
+ ["tigerbot-7b-sft"],
453
+ "Tigerbot",
454
+ "https://huggingface.co/TigerResearch/tigerbot-7b-sft",
455
+ "TigerBot is a large-scale language model (LLM) with multiple languages and tasks.",
456
+ )
457
+
458
+ register_model_info(
459
+ ["internlm-chat-7b", "internlm-chat-7b-8k"],
460
+ "InternLM",
461
+ "https://huggingface.co/internlm/internlm-chat-7b",
462
+ "InternLM is a multi-language large-scale language model (LLM), developed by SHLAB.",
463
+ )
464
+
465
+ register_model_info(
466
+ ["Qwen-7B-Chat"],
467
+ "Qwen",
468
+ "https://huggingface.co/Qwen/Qwen-7B-Chat",
469
+ "Qwen is a multi-language large-scale language model (LLM), developed by Damo Academy.",
470
+ )
471
+
472
+ register_model_info(
473
+ ["Llama2-Chinese-13b-Chat", "LLama2-Chinese-13B"],
474
+ "Llama2-Chinese",
475
+ "https://huggingface.co/FlagAlpha/Llama2-Chinese-13b-Chat",
476
+ "Llama2-Chinese is a multi-language large-scale language model (LLM), developed by FlagAlpha.",
477
+ )
478
+
479
+ register_model_info(
480
+ ["Chinese-Alpaca-2-7B", "Chinese-Alpaca-2-13B"],
481
+ "Chinese-Alpaca",
482
+ "https://huggingface.co/hfl/chinese-alpaca-2-13b",
483
+ "New extended Chinese vocabulary beyond Llama-2, open-sourcing the Chinese LLaMA-2 and Alpaca-2 LLMs.",
484
+ )
485
+
486
+ register_model_info(
487
+ ["Vigogne-2-7B-Instruct", "Vigogne-2-13B-Instruct"],
488
+ "Vigogne-Instruct",
489
+ "https://huggingface.co/bofenghuang/vigogne-2-7b-instruct",
490
+ "Vigogne-Instruct is a French large language model (LLM) optimized for instruction-following, developed by Bofeng Huang",
491
+ )
492
+
493
+ register_model_info(
494
+ ["Vigogne-2-7B-Chat", "Vigogne-2-13B-Chat"],
495
+ "Vigogne-Chat",
496
+ "https://huggingface.co/bofenghuang/vigogne-2-7b-chat",
497
+ "Vigogne-Chat is a French large language model (LLM) optimized for instruction-following and multi-turn dialogues, developed by Bofeng Huang",
498
+ )
499
+
500
+ register_model_info(
501
+ ["stable-vicuna-13B-HF"],
502
+ "stable-vicuna",
503
+ "https://huggingface.co/TheBloke/stable-vicuna-13B-HF",
504
+ "StableVicuna is a Vicuna model fine-tuned using RLHF via PPO on various conversational and instructional datasets.",
505
+ )
506
+
507
+ register_model_info(
508
+ ["deluxe-chat-v1", "deluxe-chat-v1.1", "deluxe-chat-v1.2"],
509
+ "DeluxeChat",
510
+ "",
511
+ "Deluxe Chat",
512
+ )
513
+
514
+ register_model_info(
515
+ [
516
+ "Xwin-LM-7B-V0.1",
517
+ "Xwin-LM-13B-V0.1",
518
+ "Xwin-LM-70B-V0.1",
519
+ "Xwin-LM-7B-V0.2",
520
+ "Xwin-LM-13B-V0.2",
521
+ ],
522
+ "Xwin-LM",
523
+ "https://github.com/Xwin-LM/Xwin-LM",
524
+ "Chat models developed by Xwin-LM team",
525
+ )
526
+
527
+ register_model_info(
528
+ ["lemur-70b-chat"],
529
+ "Lemur-Chat",
530
+ "https://huggingface.co/OpenLemur/lemur-70b-chat-v1",
531
+ "an openly accessible language model optimized for both natural language and coding capabilities ",
532
+ )
533
+
534
+ register_model_info(
535
+ ["Mistral-7B-OpenOrca"],
536
+ "Open-Orca",
537
+ "https://huggingface.co/Open-Orca/Mistral-7B-OpenOrca",
538
+ "A fine-tune of [Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1) using [OpenOrca dataset](https://huggingface.co/datasets/Open-Orca/OpenOrca)",
539
+ )
540
+
541
+ register_model_info(
542
+ ["dolphin-2.2.1-mistral-7b"],
543
+ "dolphin-mistral",
544
+ "https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b",
545
+ "An uncensored fine-tuned Mistral 7B",
546
+ )
547
+
548
+ register_model_info(
549
+ [
550
+ "AquilaChat-7B",
551
+ "AquilaChat2-7B",
552
+ "AquilaChat2-34B",
553
+ ],
554
+ "Aquila-Chat",
555
+ "https://huggingface.co/BAAI/AquilaChat2-34B",
556
+ "Chat models developed by BAAI team",
557
+ )
558
+
559
+ register_model_info(
560
+ ["xDAN-L1-Chat-RL-v1"],
561
+ "xDAN-L1-Chat",
562
+ "https://huggingface.co/xDAN-AI/xDAN-L1-Chat-RL-v1",
563
+ "A large language chat model created by xDAN-AI.",
564
+ )
565
+
566
+ register_model_info(
567
+ ["MetaMath-70B-V1.0", "MetaMath-7B-V1.0"],
568
+ "MetaMath",
569
+ "https://huggingface.co/meta-math",
570
+ "MetaMath is a finetune of Llama2 on [MetaMathQA](https://huggingface.co/datasets/meta-math/MetaMathQA) that specializes in mathematical reasoning.",
571
+ )
572
+
573
+ register_model_info(
574
+ ["Yuan2-2B-hf", "Yuan2-51B-hf", "Yuan2-102B-hf"],
575
+ "IEIYuan",
576
+ "https://huggingface.co/IEITYuan",
577
+ "Yuan2 is a Basemodel developed by IEI.",
578
+ )
arena_elo/elo_rating/upload_battle_data.py ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import fire
2
+ import json
3
+ import os
4
+ import datasets
5
+ import datetime
6
+ from pathlib import Path
7
+ from datetime import datetime
8
+ from PIL import Image
9
+
10
+ datasets.config.DEFAULT_MAX_BATCH_SIZE = 500
11
def create_hf_dataset(data_file: str, split="test"):
    """Build a HuggingFace Dataset of single-model conversation records.

    NOTE(review): despite the parameter name and `str` annotation, callers in
    this file pass a *list of dicts* (Dataset.from_list takes a mapping list),
    not a file path — confirm and consider renaming.

    Args:
        data_file: list of records, each with question_id / model /
            conversation / language / image / turn keys.
        split: split name attached to the resulting Dataset (e.g. "test").

    Returns:
        A datasets.Dataset with an explicit feature schema.
    """
    hf_dataset = datasets.Dataset.from_list(
        data_file,
        # Explicit schema: the image column is stored as a datasets.Image
        # feature, and conversations keep their role/content structure.
        features=datasets.Features(
            {
                "question_id": datasets.Value("string"),
                "model": datasets.Value("string"),
                "conversation": [
                    {
                        "role": datasets.Value("string"),
                        "content": datasets.Value("string"),
                    }
                ],
                "language": datasets.Value("string"),
                "image": datasets.Image(),
                "turn": datasets.Value("int32"),
            }
        ),
        split=split,
    )
    return hf_dataset
32
+
33
def create_hf_battle_dataset(data_file: str, split="test"):
    """Build a HuggingFace Dataset of pairwise (model_a vs model_b) battles.

    NOTE(review): like create_hf_dataset, `data_file` is actually a list of
    battle dicts, not a path — confirm and consider renaming.

    Args:
        data_file: list of battle records with model_a/model_b, both
            conversations, language, image, turn, and the anony flag.
        split: split name attached to the resulting Dataset (e.g. "test").

    Returns:
        A datasets.Dataset with an explicit feature schema.
    """
    hf_dataset = datasets.Dataset.from_list(
        data_file,
        features=datasets.Features(
            {
                "question_id": datasets.Value("string"),
                "model_a": datasets.Value("string"),
                "model_b": datasets.Value("string"),
                "conversation_a": [
                    {
                        "role": datasets.Value("string"),
                        "content": datasets.Value("string"),
                    }
                ],
                "conversation_b": [
                    {
                        "role": datasets.Value("string"),
                        "content": datasets.Value("string"),
                    }
                ],
                "language": datasets.Value("string"),
                "image": datasets.Image(),
                "turn": datasets.Value("int32"),
                # True when the battle was fought with model names hidden.
                "anony": datasets.Value("bool"),
            }
        ),
        split=split,
    )
    return hf_dataset
62
+
63
+
64
+
65
+
66
def load_image(path:str):
    """Open an image file with PIL; on any failure, log and return None."""
    try:
        img = Image.open(path)
    except Exception as err:
        # Best-effort loader: missing or corrupt files are skipped upstream.
        print(f"Error loading image {path}: {err}")
        return None
    return img
72
+
73
+ def get_date_from_time_stamp(unix_timestamp: int):
74
+ # Create a datetime object from the Unix timestamp
75
+ dt = datetime.fromtimestamp(unix_timestamp)
76
+
77
+ # Convert the datetime object to a string with the desired format
78
+ date_str = dt.strftime("%Y-%m-%d")
79
+ return date_str
80
+
81
def load_battle_image(battle, log_dir):
    """Locate and load a battle's input image from the per-day log folder."""
    day = get_date_from_time_stamp(battle['tstamp'])
    image_path = Path(log_dir) / f"{day}-convinput_images" / f"input_image_{battle['question_id']}.png"
    return load_image(image_path)
84
+
85
+
86
def main(
    data_file: str = "./results/latest/clean_battle_conv.json",
    repo_id: str = "DongfuTingle/wildvision-bench",
    log_dir: str = os.getenv("LOGDIR", "./vision-arena-logs/"),
    mode="battle",
    token = os.environ.get("HUGGINGFACE_TOKEN", None)
):
    """Convert cleaned battle logs into a HuggingFace dataset and push it to the Hub.

    Args:
        data_file: path to the cleaned battle JSON.
        repo_id: destination HuggingFace dataset repo.
        log_dir: root directory of arena logs (holds the per-day image folders).
        mode: "battle" uploads full pairwise battles; "keep_bad_only" uploads
            only the losing conversation (or both, for tie-bothbad) from
            anonymous battles.
        token: HuggingFace auth token; falls back to $HUGGINGFACE_TOKEN.
    """
    with open(data_file, "r") as f:
        data = json.load(f)

    # Battles whose input image cannot be found on disk are dropped; these
    # counters report how many records were kept vs. skipped.
    has_image_stats = {
        "has_image": 0,
        "no_image": 0,
    }
    if mode == "keep_bad_only":
        # anony only
        data = [d for d in data if d["anony"]]

        new_data = []
        for battle in data:
            image = load_battle_image(battle, log_dir)
            if image is None:
                has_image_stats["no_image"] += 1
                # we don't keep the data without image
                continue
            has_image_stats["has_image"] += 1

            if battle["winner"] in ["model_a", "model_b"]:
                # Keep only the losing side's conversation.
                if battle["winner"] == "model_a":
                    worse_model = "model_b"
                    worse_conv = "conversation_b"
                if battle["winner"] == "model_b":
                    worse_model = "model_a"
                    worse_conv = "conversation_a"

                new_data.append({
                    "question_id": battle["question_id"],
                    "model": battle[worse_model],
                    "conversation": battle[worse_conv],
                    "language": battle["language"],
                    "image": image,
                    "turn": battle["turn"],
                })
            elif battle["winner"] == "tie (bothbad)":
                # Both answers were judged bad: keep both conversations.
                new_data.append({
                    "question_id": battle["question_id"],
                    "model": battle["model_a"],
                    "conversation": battle["conversation_a"],
                    "language": battle["language"],
                    "image": image,
                    "turn": battle["turn"],
                })

                new_data.append({
                    "question_id": battle["question_id"],
                    "model": battle["model_b"],
                    "conversation": battle["conversation_b"],
                    "language": battle["language"],
                    "image": image,
                    "turn": battle["turn"],
                })

        split = "test"
        hf_dataset = create_hf_dataset(new_data, "test")

    elif mode == "battle":
        new_data = []
        for battle in data:
            image = load_battle_image(battle, log_dir)
            if image is None:
                has_image_stats["no_image"] += 1
                continue
            has_image_stats["has_image"] += 1
            new_data.append({
                "question_id": battle["question_id"],
                "model_a": battle["model_a"],
                "model_b": battle["model_b"],
                "conversation_a": battle["conversation_a"],
                "conversation_b": battle["conversation_b"],
                "language": battle["language"],
                "image": image,
                "turn": battle["turn"],
                "anony": battle["anony"],
            })
        split = "test"
        hf_dataset = create_hf_battle_dataset(new_data, "test")
    else:
        raise ValueError(f"Invalid mode: {mode}")

    print(f"Stats: {has_image_stats}")
    print(hf_dataset)
    print(f"Uploading to part {repo_id}:{split}...")
    # Each mode is stored as its own dataset config on the Hub.
    hf_dataset.push_to_hub(
        repo_id=repo_id,
        config_name=mode,
        split=split,
        token=token,
        commit_message=f"Add vision-arena {split} dataset",
    )

    print("Done!")


if __name__ == "__main__":
    fire.Fire(main)
arena_elo/elo_rating/utils.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ import pytz
3
+ import PIL
4
+ import os
5
+
6
def detect_language(text: str) -> str:
    """Return the language name detected for *text*, or "unknown" on failure."""
    # Imported lazily so the heavy optional dependency is only required
    # when language detection is actually used.
    import polyglot  # pip3 install polyglot pyicu pycld2
    from polyglot.detect import Detector
    from polyglot.detect.base import logger as polyglot_logger
    import pycld2

    polyglot_logger.setLevel("ERROR")

    try:
        return Detector(text).language.name
    except (pycld2.error, polyglot.detect.base.UnknownLanguage):
        return "unknown"
20
+
21
+
22
def get_time_stamp_from_date(date_str: str):
    """
    Convert a date string to a Unix timestamp.

    Args:
        date_str (str): The input date string in the format 'YYYY-MM-DD-HH:MM-TZ',
            e.g. '2024-02-10-14:00-PT'

    Returns:
        float: seconds since the epoch for that wall-clock time in the named zone.

    Raises:
        KeyError: if the timezone abbreviation is not in the supported map.
        ValueError: if the date portion does not match 'YYYY-MM-DD-HH:MM'.
    """
    # BUGFIX: the previous implementation attached the zone with
    # datetime.replace(tzinfo=pytz.timezone("US/Pacific")); pytz zones used
    # that way yield the zone's historical LMT offset (-07:53 for US/Pacific)
    # instead of PST/PDT, skewing every timestamp by several minutes.
    # stdlib zoneinfo handles replace() correctly (including DST).
    from zoneinfo import ZoneInfo

    # Abbreviations like "PT" are not IANA names, so map them explicitly.
    timezone_map = {
        "PT": "US/Pacific",
    }

    # Extract the timezone abbreviation and resolve it to a real zone.
    tz_abbr = date_str.split("-")[-1]
    tz_info = ZoneInfo(timezone_map[tz_abbr])

    # Remove the timezone abbreviation for parsing.
    date_str_parsed = date_str.rsplit("-", 1)[0]

    # Create an aware datetime in the target zone, then convert to epoch seconds.
    dt = datetime.strptime(date_str_parsed, "%Y-%m-%d-%H:%M").replace(tzinfo=tz_info)

    unix_timestamp = dt.timestamp()
    return unix_timestamp
53
+
54
+ def get_date_from_time_stamp(unix_timestamp: int):
55
+ # Create a datetime object from the Unix timestamp
56
+ dt = datetime.fromtimestamp(unix_timestamp)
57
+
58
+ # Convert the datetime object to a string with the desired format
59
+ date_str = dt.strftime("%Y-%m-%d %H:%M:%S %Z")
60
+ return date_str
61
+
62
+
63
def get_input_image_path(tstamp, conv_id):
    """Build the on-disk path of a conversation's input image under $LOGDIR."""
    # Daily image folders are named by the US/Pacific calendar date of the event.
    pacific_day = datetime.fromtimestamp(tstamp, tz=pytz.timezone("US/Pacific")).strftime("%Y-%m-%d")
    log_root = os.getenv("LOGDIR")
    return f"{log_root}/{pacific_day}-convinput_images/input_image_{conv_id}.png"
68
+
69
def load_image_from_path(image_path):
    """Open an image via PIL; return None (with a message) when the file is
    missing or is not a recognizable image format."""
    try:
        return PIL.Image.open(image_path)
    except FileNotFoundError:
        print(f"Image not found at path: {image_path}")
        return None
    except PIL.UnidentifiedImageError:
        print(f"Unidentified image format at path: {image_path}")
        return None
+
82
+
83
+
arena_elo/evaluator/convert_to_evaluator_data.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import json
3
+ import os
4
+ import time
5
+ from pytz import timezone
6
+ from tqdm import tqdm
7
+ import base64
8
+ from icecream import ic
9
+ from PIL import Image
10
+
11
+
12
# Function to encode the image
def encode_image(image_path):
    """Read a file and return its contents as a base64-encoded ASCII string."""
    with open(image_path, "rb") as image_file:
        raw = image_file.read()
    return base64.b64encode(raw).decode('utf-8')
16
+
17
def get_log_files(max_num_files=None):
    """Collect the per-day conversation log files that exist on disk.

    Scans Feb-Mar 2024 dates under vision-arena-logs/ and returns the most
    recent `max_num_files` paths (all of them when None).
    """
    candidate_dates = [
        f"2024-{month:02d}-{day:02d}" for month in (2, 3) for day in range(1, 32)
    ]

    num_servers = 1
    found = []
    for date_str in candidate_dates:
        for _ in range(num_servers):
            # name = os.path.expanduser(f"~/fastchat_logs/server{i}/{d}-conv.json")
            path = os.path.expanduser(f"vision-arena-logs/{date_str}-conv.json")
            if os.path.exists(path):
                found.append(path)

    limit = max_num_files or len(found)
    return found[-limit:]
34
+
35
+
36
def pretty_print_conversation(messages):
    """Print each (role, message) pair as '[[role]]: message'."""
    for speaker, text in messages:
        print(f"[[{speaker}]]: {text}")
39
+
40
# Prompt templates for the GPT-based evaluator, keyed by task name.
# Placeholders ({instruction}, {generated_sentence}) are filled via str.format.

# Single-response scoring prompts (0-100 scale).
_IMAGE_CAPTION_TPL = "Give me the semantic alignment score between the given image and the given caption: \"{generated_sentence}\" on a scale of 0-100. Only reply the score value."
_VQA_TPL = "Rate the answer correctness regarding the question within the context of the given image on a scale of 0-100. Only reply the score value."

# Pairwise A/B comparison prompts. "_old" is an earlier, shorter variant;
# "_wexplanation" asks for a verdict in [[A]]/[[B]]/[[C]] format with reasoning;
# "pair_rate" asks directly for a vote label.
_PAIR_RATE_OLD_TPL = "[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"\n\n[System]\nGiven the instruction and the image, please compare the correctness of responses A and B. Reply with \"leftvote\" if you find A better, \"rightvote\" if B is better, \"bothbad_vote\" if both responses are wrong, and \"tievote\" if both responses are equally satisfactory. If you are unable to make a decision, please reply with \"NA\"."
_PAIR_RATE_WEXPL_TPL = "<image>[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"[System]\nPlease act as an impartial judge and evaluate the quality of the responses provided by two AI assistants to the user question displayed below. You should choose the assistant that follows the user’s instructions and answers the user’s question better. Your evaluation should consider factors such as the helpfulness, relevance, accuracy, depth, creativity, and level of detail of their responses. Begin your evaluation by comparing the two responses and provide a short explanation. Avoid any positional biases and ensure that the order in which the responses were presented does not influence your decision. Do not allow the length of the responses to influence your evaluation. Do not favor certain names of the assistants. Be as objective as possible. After providing your explanation, output your final verdict by strictly following this format: \"[[A]]\" if assistant A is better, \"[[B]]\" if assistant B is better, and \"[[C]]\" for a tie."
_PAIR_RATE_TPL = "<image>[Instruction]\n\"{instruction}\"\n\n\"{generated_sentence}\"\n\n[System]\nPlease act as an impartial judge and evaluate the quality of the responses provided by two AI assistants to the user question displayed below. You should choose the assistant that follows the user’s instructions and answers the user’s question better. Your evaluation should consider factors such as the helpfulness, relevance, accuracy, depth, creativity, and level of detail of their responses. Begin your evaluation by comparing the two responses and provide a short explanation. Avoid any positional biases and ensure that the order in which the responses were presented does not influence your decision. Do not allow the length of the responses to influence your evaluation. Do not favor certain names of the assistants. Be as objective as possible. Reply with \"leftvote\" if you find assistant A better, \"rightvote\" if assistant B is better, \"bothbad_vote\" if both responses are wrong, and \"tievote\" if both assistants provide equally satisfactory answers. If you are unable to make a decision, please reply with \"NA\"."

task_template_map = {
    "image_caption": _IMAGE_CAPTION_TPL,
    "vqa": _VQA_TPL,
    "pair_rate_old": _PAIR_RATE_OLD_TPL,
    "pair_rate_wexplanation": _PAIR_RATE_WEXPL_TPL,
    "pair_rate": _PAIR_RATE_TPL,
}
47
+
48
def inspect_convs(log_files):
    """Convert paired-vote battle logs into evaluator training data.

    Scans each JSONL log file for pair-vote records ("leftvote",
    "rightvote", "bothbad_vote", "tievote") whose two model responses are
    present (no NETWORK ERROR) and whose input image exists on disk, builds
    a human/gpt conversation pair from the ``pair_rate`` template, and
    writes all collected examples to ``output_evaluator_data.json``.

    Args:
        log_files: paths of JSONL server log files to scan.
    """
    json_data = []

    ic(log_files)
    total_vote = 0

    for filename in tqdm(log_files, desc="read files"):
        # The server may still be writing/rotating the file; retry briefly.
        lines = None
        for _ in range(5):
            try:
                # `with` closes the handle promptly (original leaked it via
                # open(...).readlines()).
                with open(filename) as f:
                    lines = f.readlines()
                break
            except FileNotFoundError:
                time.sleep(2)
        if lines is None:
            # Fix: previously `lines` was unbound (NameError) — or stale from
            # the previous file — when every retry failed; skip the file.
            continue

        for line in lines:
            row = json.loads(line)

            if "states" not in row:
                continue
            if row["type"] not in ["leftvote", "rightvote", "bothbad_vote", "tievote"]:
                continue

            left_state, right_state = row["states"][0], row["states"][1]

            # Fix: require a full prompt/response pair on both sides; the
            # original only checked truthiness of the left message list and
            # could IndexError on messages[1].
            if len(left_state["messages"]) < 2 or len(right_state["messages"]) < 2:
                continue

            left_response = left_state["messages"][1][1]
            right_response = right_state["messages"][1][1]
            if (left_response is None or right_response is None
                    or "NETWORK ERROR" in left_response
                    or "NETWORK ERROR" in right_response):
                continue
            total_vote += 1

            conv_id = left_state["conv_id"]
            # NOTE(review): machine-specific absolute path — assumes the logs
            # live on this host; confirm before running elsewhere.
            image_path = os.path.join(
                "/local/home/yujielu/project/Arena-Elo/vision-arena-logs",
                os.path.basename(filename)[:-5] + "input_images",
                f"input_image_{conv_id}.png",
            )
            if not os.path.exists(image_path):
                continue
            try:
                # Opened only to validate that the file is a decodable image.
                Image.open(image_path).convert("RGB")
            except Exception:  # narrowed from bare `except:` (no longer traps SystemExit)
                continue

            instruction = left_state["messages"][0][1]
            generated_sentence = (
                f"[The Start of Assistant A’s Answer]\n{left_response}\n"
                f"[The End of Assistant A’s Answer]\n\n"
                f"[The Start of Assistant B’s Answer]\n{right_response}\n"
                f"[The End of Assistant B’s Answer]"
            )
            text_prompt = task_template_map["pair_rate"].format(
                instruction=instruction, generated_sentence=generated_sentence
            )

            # Human turn carries the judge prompt; gpt turn carries the
            # ground-truth human vote label.
            conversation = [
                {"from": "human", "value": text_prompt},
                {"from": "gpt", "value": row["type"]},
            ]

            json_data.append({
                "id": conv_id,
                "image": image_path,
                "conversations": conversation,
            })

    # Persist all collected examples in one pass.
    with open('output_evaluator_data.json', 'w') as json_file:
        json.dump(json_data, json_file, indent=2)
121
+
122
if __name__ == "__main__":
    # CLI entry point: collect log files and convert them to evaluator data.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--max-num-files", type=int)
    cli_args = arg_parser.parse_args()

    inspect_convs(get_log_files(cli_args.max_num_files))
132
+
133
+
134
+
arena_elo/evaluator/rating_analysis.ipynb ADDED
@@ -0,0 +1,321 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 43,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "name": "stdout",
10
+ "output_type": "stream",
11
+ "text": [
12
+ "1338\n",
13
+ "1044\n"
14
+ ]
15
+ }
16
+ ],
17
+ "source": [
18
+ "\n",
19
+ "import pandas as pd\n",
20
+ "import json\n",
21
+ "\n",
22
+ "# Replace 'your_file_name.csv' with the path to your CSV file\n",
23
+ "file_name = 'all_pairvote_log_wgpt.csv'\n",
24
+ "\n",
25
+ "# Load the CSV file into a DataFrame\n",
26
+ "df = pd.read_csv(file_name)\n",
27
+ "\n",
28
+ "# Define a function to parse JSON data\n",
29
+ "def parse_json(data):\n",
30
+ " try:\n",
31
+ " # Parse the JSON data\n",
32
+ " return json.loads(data)\n",
33
+ " except ValueError as e:\n",
34
+ " # Return None or an empty dictionary if the data cannot be parsed\n",
35
+ " return None\n",
36
+ "\n",
37
+ "# Apply the parse_json function to the 'models' and 'states' columns\n",
38
+ "df['models'] = df['models'].apply(parse_json)\n",
39
+ "df['states'] = df['states'].apply(parse_json)\n",
40
+ "# row[\"states\"][0]['messages'][0][1]\n",
41
+ "\n",
42
+ "# Now df contains the parsed JSON data in the 'models' and 'states' columns\n",
43
+ "# print(df.head())\n",
44
+ "print(len(df))\n",
45
+ "# filter_vote_df = df[df[\"gpt_vote\"].isin([\"leftvote\", \"rightvote\"])]#, \"tievote\", \"bothbad_vote\"\n",
46
+ "# \\#1\n",
47
+ "filter_vote_df = df[df[\"gpt_vote\"].isin([\"leftvote\", \"rightvote\", \"tievote\", \"bothbad_vote\"])]\n",
48
+ "# \\#2\n",
49
+ "# filter_vote_df = df\n",
50
+ "filter_vote_df.loc[~filter_vote_df[\"gpt_vote\"].isin([\"leftvote\", \"rightvote\"]), \"gpt_vote\"] = \"tie\"\n",
51
+ "filter_vote_df.loc[~filter_vote_df[\"type\"].isin([\"leftvote\", \"rightvote\"]), \"type\"] = \"tie\"\n",
52
+ "# \\#3\n",
53
+ "#[df[\"gpt_vote\"].isin([\"leftvote\", \"rightvote\"]) & df[\"type\"].isin([\"leftvote\", \"rightvote\"])]\n",
54
+ "filtered_df = filter_vote_df[filter_vote_df[\"states\"].apply(lambda x: len(x[0]['messages'][0][1]) > 10)]\n",
55
+ "print(len(filtered_df))\n"
56
+ ]
57
+ },
58
+ {
59
+ "cell_type": "code",
60
+ "execution_count": 44,
61
+ "metadata": {},
62
+ "outputs": [
63
+ {
64
+ "name": "stdout",
65
+ "output_type": "stream",
66
+ "text": [
67
+ "Confusion Matrix:\n",
68
+ "[[300 61 34]\n",
69
+ " [102 269 27]\n",
70
+ " [ 99 111 41]]\n",
71
+ "\n",
72
+ "Accuracy: 0.5842911877394636\n"
73
+ ]
74
+ }
75
+ ],
76
+ "source": [
77
+ "import warnings\n",
78
+ "warnings.filterwarnings('ignore')\n",
79
+ "\n",
80
+ "from sklearn.metrics import confusion_matrix, accuracy_score\n",
81
+ "import pandas as pd\n",
82
+ "\n",
83
+ "# Assuming df is your DataFrame\n",
84
+ "\n",
85
+ "# True labels\n",
86
+ "y_true = filtered_df[\"type\"]\n",
87
+ "\n",
88
+ "# Predictions\n",
89
+ "y_pred = filtered_df[\"gpt_vote\"]\n",
90
+ "\n",
91
+ "# Compute the confusion matrix\n",
92
+ "# conf_matrix = confusion_matrix(y_true, y_pred, labels=[\"leftvote\", \"rightvote\", \"tievote\", \"bothbad_vote\"])\n",
93
+ "conf_matrix = confusion_matrix(y_true, y_pred, labels=[\"leftvote\", \"rightvote\", \"tie\"])\n",
94
+ "\n",
95
+ "# Compute the accuracy\n",
96
+ "accuracy = accuracy_score(y_true, y_pred)\n",
97
+ "\n",
98
+ "print(\"Confusion Matrix:\")\n",
99
+ "print(conf_matrix)\n",
100
+ "\n",
101
+ "print(\"\\nAccuracy:\", accuracy)\n"
102
+ ]
103
+ },
104
+ {
105
+ "cell_type": "code",
106
+ "execution_count": 45,
107
+ "metadata": {},
108
+ "outputs": [
109
+ {
110
+ "data": {
111
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA0YAAAJwCAYAAACtcHEcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB0yUlEQVR4nO3deZyN5f/H8feZfcaYMZaxMwyGkTWJURSKKFshS/YtREQoe4oKaVFpsaRBZUlRUrLLUtYsk2VkaYSxjGEYZq7fH37O12ksc8aMM+N+PR+P83jMXPd9X+dzxrnHec913ddtM8YYAQAAAICFubm6AAAAAABwNYIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAWsWTJElWsWFE+Pj6y2Ww6c+ZMuvY/ffp02Ww2HTx4MF37zcpsNptGjhzp6jIApALBCECafPjhh7LZbHrwwQddXcodu/Zh7vfff7/h9kceeUT33XffXa7q3hQdHa3evXurVKlS8vPzk5+fn8LDw9WrVy9t377dYd+RI0fKZrPZH9f2HTp0qOLi4iTJYfutHitWrLhtbZcvX1Z4eLhsNpvGjx9vb+/Tp49sNpv27dt302NfffVV2Wy2FK/hRlasWKFmzZopX7588vLyUnBwsJ566inNnz//tsfeidjYWLVo0UK+vr6aPHmyZs6cqWzZsmXoc95NISEhstlsqlu37g23f/rpp/b3w83O9VtZt26dRo4cme5hEkDm4eHqAgBkTZGRkQoJCdHGjRu1b98+lShRwtUlIZNbtGiRWrZsKQ8PD7Vp00YVKlSQm5ub9uzZo/nz5+ujjz5SdHS0ihYt6nDcRx99JH9/f8XHx2vp0qV6/fXX9euvv2rt2rWaOXOmw75ffPGFfv755xTtZcqUuW1977//vg4dOpSivU2bNnr//fc1a9YsDR8+/IbHzp49W+XKlVP58uVv+RwjRozQ6NGjVbJkSXXv3l1FixZVbGysfvjhBz399NOKjIxU69atb1trWmzatEnnzp3Ta6+9dtPwcKeee+45Pfvss/L29s6Q/m/Hx8dHy5cv17Fjx5QvXz6HbZGRkfLx8dHFixfT1Pe6des0atQodejQQTly5Ej1cQkJCfLw4OMWkCUYAHDSgQMHjCQzf/58kydPHjNy5MhUHXf58mVz6dKlDK7OedOmTTOSzKZNm264vVatWqZs2bJ3uap7y759+0y2bNlMmTJlzD///JNi++XLl827775rDh06ZG8bMWKEkWROnDjhsG+zZs2MJLNu3boU/fTq1cuk5b+2f//91wQGBprRo0cbSebtt9922F6iRAlTunTpGx67bt06I8mMGzfuls/xzTffGEnmmWeeMYmJiSm2L1myxHz//fdO155aM2bMuOX7PKsrWrSoqVOnjgkICDCTJk1y2Hb48GHj5uZmnn766TT/DN5++20jyURHR99236SkJJOQkOD0cwBwLabSAXBaZGSkgoKC1LBhQz3zzDOKjIxMsc/BgwftU5ImTZqk0NBQeXt7a9euXZKkPXv26JlnnlHOnDnl4+OjKlWq6LvvvnPo49oUt7Vr16p///7KkyePsmXLpqZNm+rEiRMO+y5cuFANGzZUgQIF5O3trdDQUL322mtKSkpK99d/7bVNnz49xbb/Xk9wbTrYX3/9pbZt2yowMFB58uTRsGHDZIzR4cOH1bhxYwUEBChfvnyaMGGCQ3+JiYkaPny47r//fgUGBipbtmx6+OGHtXz58hvWNH78eH3yySf2n/cDDzygTZs23fL1/P7777LZbJoxY0aKbT/99JNsNpsWLVokSTp37pxefPFFhYSEyNvbW8HBwXrssce0efPmWz7HW2+9pfPnz2vatGnKnz9/iu0eHh7q06ePChcufMt+JKl27dqSrk7LSy+DBw9WWFiY2rZte8Ptbdq00Z49e274
OmfNmiWbzaZWrVrd8jmGDRumnDlzaurUqfL09EyxvV69enryySft3x8/flydO3dW3rx55ePjowoVKqT4N0rtv/sjjzyi9u3bS5IeeOAB2Ww2dejQQdLVKWjXvr7eI488okceecSh7f3331fZsmXl5+enoKAgValSRbNmzbJvv9k1Rh9++KHKli0rb29vFShQQL169UoxJe3alNVdu3bp0UcflZ+fnwoWLKi33nrrZj/SFHx8fNSsWTOHmqSrI3pBQUGqV69eimO2b9+uDh06qHjx4vLx8VG+fPnUqVMnxcbG2vcZOXKkBg4cKEkqVqyYfUretddps9nUu3dvRUZG2l/nkiVL7Nuu/U5ISEhQ6dKlVbp0aSUkJNj7P3XqlPLnz6+IiIgM+Z0FIHUY2wXgtMjISDVr1kxeXl5q1aqVPvroI23atEkPPPBAin2nTZumixcvqlu3bvL29lbOnDm1c+dO1ahRQwULFtTgwYOVLVs2ff3112rSpInmzZunpk2bOvTxwgsvKCgoSCNGjNDBgwc1adIk9e7dW1999ZV9n+nTp8vf31/9+/eXv7+/fv31Vw0fPlxxcXF6++23U/W6zp49q5MnT6Zov3z5spM/oZRatmypMmXKaNy4cVq8eLHGjBmjnDlzasqUKapdu7befPNNRUZGasCAAXrggQdUs2ZNSVJcXJw+++wztWrVSl27dtW5c+f0+eefq169etq4caMqVqzo8DyzZs3SuXPn1L17d9lsNr311ltq1qyZDhw4cMMP45JUpUoVFS9eXF9//bX9w/M1X331lcMHyh49emju3Lnq3bu3wsPDFRsbqzVr1mj37t2qXLnyTV//okWLVKJEiXS5Jm3//v2SpFy5ct1xX5K0ceNGzZgxQ2vWrJHNZrvhPm3atNGoUaM0a9Ysh9eZlJSkr7/+Wg8//LCKFCly0+fYu3ev9uzZo06dOil79uy3rSkhIUGPPPKI9u3bp969e6tYsWL65ptv1KFDB505c0Z9+/Z12P92/+6vvvqqwsLC9Mknn2j06NEqVqyYQkNDU/kTuurTTz9Vnz599Mwzz6hv3766ePGitm/frg0bNtxy+t/IkSM1atQo1a1bV88//7yioqLsvzPWrl3r8L48ffq06tevr2bNmqlFixaaO3euBg0apHLlyumJJ55IVZ2tW7fW448/rv3799tf46xZs/TMM8/c8Bz4+eefdeDAAXXs2FH58uXTzp079cknn2jnzp1av369bDabmjVrpr/++kuzZ8/WO++8o9y5c0uS8uTJY+/n119/1ddff63evXsrd+7cCgkJSfFcvr6+mjFjhmrUqKFXX31VEydOlCT16tVLZ8+e1fTp0+Xu7p6q1wkgA7h6yApA1vL7778bSebnn382xhiTnJxsChUqZPr27euwX3R0tJFkAgICzPHjxx221alTx5QrV85cvHjR3pacnGwiIiJMyZIl7W3XprjVrVvXJCcn29v79etn3N3dzZkzZ+xtFy5cSFFr9+7djZ+fn8Pz3Mi157nV4/qpdNde27Rp01L0JcmMGDHC/v216WDdunWzt125csUUKlTI2Gw2h+lXp0+fNr6+vqZ9+/YO+/53+uHp06dN3rx5TadOnVLUlCtXLnPq1Cl7+8KFC42k207RGjJkiPH09HQ49tKlSyZHjhwOzxMYGGh69ep1y77+6+zZs0aSadKkSYptp0+fNidOnLA/rv93vPazi4qKMidOnDDR0dFmypQpxtvb2+TNm9ecP38+RX/OTqVLTk42VatWNa1atTLG/O/n+N+pdMYY88ADD5hChQqZpKQke9uSJUuMJDNlypRbPs+1f4d33nknVXVNmjTJSDJffvmlvS0xMdFUr17d+Pv7m7i4OId6U/PvfrMpo0WLFnV4z11Tq1YtU6tWLfv3jRs3vu2U0mvPcW262fHjx42Xl5d5/PHHHX5uH3zwgZFkpk6d6vB8kswXX3xhb7t06ZLJly+fefrpp2/5vNdeR8OG
Dc2VK1dMvnz5zGuvvWaMMWbXrl1Gklm5cuUNfwY3+t0xe/ZsI8msWrXK3narqXSSjJubm9m5c+cNt13/O8GYq+ebm5ubWbVqlX2K5X+n/wG4+5hKB8ApkZGRyps3rx599FFJV6eJtGzZUnPmzLnhFJCnn37a4a+qp06d0q+//qoWLVro3LlzOnnypE6ePKnY2FjVq1dPe/fu1dGjRx366Natm8Nf8h9++GElJSXp77//trf5+vrav77W78MPP6wLFy5oz549qXptkydP1s8//5zicbsL6lOjS5cu9q/d3d1VpUoVGWPUuXNne3uOHDkUFhamAwcOOOzr5eUlSUpOTtapU6d05coVValS5YbTulq2bKmgoCD79w8//LAkOfR5Iy1bttTly5cdVkZbunSpzpw5o5YtWzrUuGHDBv3zzz+pfen2FeT8/f1TbHvkkUeUJ08e+2Py5Mkp9gkLC1OePHlUrFgxde/eXSVKlNDixYvl5+eX6hpuZvr06dqxY4fefPPN2+7btm1bHTlyRKtWrbK3zZo1S15eXmrevPktj732M0jNaJEk/fDDD8qXL5/D9DxPT0/16dNH8fHxWrlypcP+af13d0aOHDl05MiR207NvN4vv/yixMREvfjii3Jz+99Hjq5duyogIECLFy922N/f399hOqOXl5eqVq3q1Otwd3dXixYtNHv2bElXf2cVLlzY/jP5r+t/d1y8eFEnT55UtWrVJOm2U0SvV6tWLYWHh6dq35EjR6ps2bJq3769evbsqVq1aqlPnz6pfi4AGYNgBCDVkpKSNGfOHD366KOKjo7Wvn37tG/fPj344IP6999/tWzZshTHFCtWzOH7ffv2yRijYcOGOXwgzpMnj0aMGCHp6rUV1/vvFKVrHwBPnz5tb9u5c6eaNm2qwMBABQQEKE+ePPYPWGfPnk3V66tatarq1q2b4nH9B860+u9rCAwMlI+Pj31KzvXt178uSZoxY4bKly8vHx8f5cqVS3ny5NHixYtv+LpS87O6kQoVKqh06dIO0xO/+uor5c6d235Nj3T1WqE///xThQsXVtWqVTVy5Mjbfmi9Fgbi4+NTbJsyZYp+/vlnffnllzc9ft68efr555+1YsUK7du3T3/++afuv//+Wz7n9eLj43Xs2DH749r1aXFxcRoyZIgGDhyYqmubnn32Wbm7u9uvX7l48aIWLFigJ5544rbvkYCAAElXQ3tq/P333ypZsqRDmJD+t7re9X8UkNL+7+6MQYMGyd/fX1WrVlXJkiXVq1cvrV279pbHXKszLCzMod3Ly0vFixdP8ToKFSqUYjpjUFCQ06+jdevW2rVrl7Zt26ZZs2bp2Wefvek0yVOnTqlv377KmzevfH197SFcSv3vDinl77pb8fLy0tSpUxUdHa1z585p2rRpN60PwN3DNUYAUu3XX39VTEyM5syZozlz5qTYHhkZqccff9yh7fq/xkpXRz0kacCAATe8EFpSiqW/bzbn3hgjSTpz5oxq1aqlgIAAjR49WqGhofLx8dHmzZs1aNAg+3Oml5t9gLnVRdM3eg23e12S9OWXX6pDhw5q0qSJBg4cqODgYLm7u2vs2LH2a22c7fNmWrZsqddff10nT55U9uzZ9d1336lVq1YOSw23aNFCDz/8sBYsWKClS5fq7bff1ptvvqn58+ff9BqQwMBA5c+fX3/++WeKbdeuObrVDUFr1qyZIkA6Y/z48Ro1apT9+6JFi+rgwYMaP368EhMT1bJlS/vzHzlyRNLVQHHw4EEVKFDAPmJ3baGJefPmafLkyfr+++917tw5tWnT5rY1lC5dWpK0Y8eONL+OW7mTf/dbvZ+v77dMmTKKiorSokWLtGTJEs2bN08ffvihhg8f7vDzvRN38jqu9+CDDyo0NFQvvviioqOjb3kNVIsWLbRu3ToNHDhQFStWlL+/v5KTk1W/fn2nfnf893fd7fz000+SrgbsvXv3OhWsAGQMghGAVIuMjFRwcPAN
pzvNnz9fCxYs0Mcff3zLDwjFixeXdHVaUHrdS2XFihWKjY3V/Pnz7YsWSOm7atn1rv01/r+rav33r9/pYe7cuSpevLjmz5/v8AH22uhaemrZsqVGjRqlefPmKW/evIqLi9Ozzz6bYr/8+fOrZ8+e6tmzp44fP67KlSvr9ddfv+XF8Q0bNtRnn32mjRs3qmrVqule+620a9dODz30kP37a+/PQ4cO6fTp0ypbtmyKY9544w298cYb2rJli8MCF23atNGSJUv0448/atasWQoICNBTTz112xpKlSqlsLAwLVy4UO++++4NpxVer2jRotq+fbuSk5MdRo2uTQv9772e7kRQUNANb1r6999/28/Xa7Jly6aWLVuqZcuWSkxMVLNmzfT6669ryJAh8vHxueHrkKSoqCiHvhITExUdHZ1h91OSpFatWmnMmDEqU6ZMikVKrjl9+rSWLVumUaNGOdyjau/evSn2Tc8Rne3bt2v06NHq2LGjtm7dqi5dumjHjh0KDAxMt+cA4Dym0gFIlYSEBM2fP19PPvmknnnmmRSP3r1769y5cymW3P6v4OBgPfLII5oyZYpiYmJSbP/vMtypce2vzNf/VTkxMVEffvih032lRkBAgHLnzu1wrYmkDHm+G722DRs26Lfffkv35ypTpozKlSunr776Sl999ZXy58/vEDSTkpJSTC0KDg5WgQIFdOnSpVv2/fLLL8vPz0+dOnXSv//+m2K7syMCzihevLjD1MgaNWpIkvr06aMFCxY4PKZMmSJJ6tChgxYsWJDir/hNmjSRn5+fPvzwQ/34449q1qzZDQPBjYwaNUqxsbHq0qWLrly5kmL70qVL7cuiN2jQQMeOHXOY2njlyhW9//778vf3V61atdL0s7iR0NBQrV+/XomJifa2RYsW6fDhww77Xb98tXR1Olh4eLiMMTddubFu3bry8vLSe++95/Bv/Pnnn+vs2bNq2LBhur2O/+rSpYtGjBiRYgn8693o/JKkSZMmpdg3W7ZsklL+QcRZly9fVocOHVSgQAG9++67mj59uv7991/169fvjvoFcOcYMQKQKt99953OnTunRo0a3XB7tWrVlCdPHkVGRjpcrH8jkydP1kMPPaRy5cqpa9euKl68uP7991/99ttvOnLkiLZt2+ZUbREREQoKClL79u3Vp08f2Ww2zZw5M0M/bHfp0kXjxo1Tly5dVKVKFa1atUp//fVXuj/Pk08+qfnz56tp06Zq2LChoqOj9fHHHys8PPyG1+zcqZYtW2r48OHy8fFR586dHUYrzp07p0KFCumZZ55RhQoV5O/vr19++UWbNm265YdPSSpZsqRmzZqlVq1aKSwsTG3atFGFChVkjFF0dLRmzZolNzc3FSpUKN1f081Urlw5xRLj16bUlS1bVk2aNElxjL+/v5o0aWK/zig10+iuadmypXbs2KHXX39dW7ZsUatWrVS0aFHFxsZqyZIlWrZsmb3fbt26acqUKerQoYP++OMPhYSEaO7cuVq7dq0mTZqU6kUcUqNLly6aO3eu6tevrxYtWmj//v368ssvUyzn/fjjjytfvnyqUaOG8ubNq927d+uDDz5Qw4YNb1pPnjx5NGTIEI0aNUr169dXo0aNFBUVpQ8//FAPPPDATe8blR6KFi3qcE+xGwkICFDNmjX11ltv6fLlyypYsKCWLl16w9Hma9e1vfrqq3r22Wfl6empp556yh6YUmvMmDHaunWrli1bpuzZs6t8+fIaPny4hg4dqmeeeUYNGjRwqj8A6YdgBCBVIiMj5ePjo8cee+yG293c3NSwYUNFRkam+Mvyf4WHh+v333/XqFGjNH36dMXGxio4OFiVKlVymM6SWrly5dKiRYv00ksvaejQoQoKClLbtm1Vp06dm17HdKeGDx+uEydOaO7cufr666/1xBNP6Mcff1RwcHC6Pk+HDh107NgxTZkyRT/99JPCw8P15Zdf6ptvvtGKFSvS9bmkqx/ehw4dqgsX
LqQIuH5+furZs6eWLl2q+fPnKzk5WSVKlNCHH36o559//rZ9N27cWDt27NCECRO0dOlSTZ06VTabTUWLFlXDhg3Vo0cPVahQId1fU3pr06aNZs2apfz58zssTJEaY8aMUe3atfXee+/po48+0qlTpxQUFKRq1app4cKF9j88+Pr6asWKFRo8eLBmzJihuLg4hYWFadq0aTe8GeudqFevniZMmKCJEyfqxRdfVJUqVezn0/W6d++uyMhITZw4UfHx8SpUqJD69OmjoUOH3rL/kSNHKk+ePPrggw/Ur18/5cyZU926ddMbb7xx03tr3U2zZs3SCy+8oMmTJ8sYo8cff1w//vijChQo4LDfAw88oNdee00ff/yxlixZouTkZEVHRzsVjDZv3qw33nhDvXv3tq/sKV29wfDChQvVtWtX7dy5Uzly5EivlwfACTaTkX9SBQAAAIAsgGuMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFjePXmDV99KvV1dAuByvy0c6+oSAJcrXSC7q0sAALiYTyoTDyNGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACwv0wSj/fv3a+jQoWrVqpWOHz8uSfrxxx+1c+dOF1cGAAAA4F6XKYLRypUrVa5cOW3YsEHz589XfHy8JGnbtm0aMWKEi6sDAAAAcK/LFMFo8ODBGjNmjH7++Wd5eXnZ22vXrq3169e7sDIAAAAAVpApgtGOHTvUtGnTFO3BwcE6efKkCyoCAAAAYCWZIhjlyJFDMTExKdq3bNmiggULuqAiAAAAAFaSKYLRs88+q0GDBunYsWOy2WxKTk7W2rVrNWDAALVr187V5QEAAAC4x2WKYPTGG2+odOnSKly4sOLj4xUeHq6aNWsqIiJCQ4cOdXV5AAAAAO5xNmOMcXUR1xw+fFg7duxQfHy8KlWqpJIlS6apH99KvdO5MiDr+W3hWFeXALhc6QLZXV0CAMDFfDxSt1+mGDEaPXq0Lly4oMKFC6tBgwZq0aKFSpYsqYSEBI0ePdrV5QEAAAC4x2WKESN3d3fFxMQoODjYoT02NlbBwcFKSkpyqj9GjABGjACJESMAQBYbMTLGyGazpWjftm2bcubM6YKKAAAAAFhJKvNTxggKCpLNZpPNZlOpUqUcwlFSUpLi4+PVo0cPF1YIAAAAwApcGowmTZokY4w6deqkUaNGKTAw0L7Ny8tLISEhql69ugsrBAAAAGAFLg1G7du3lyQVK1ZMERER8vT0dGU5AAAAACzKpcHomlq1aikpKUnz5s3T7t27JUlly5ZVo0aN5O7u7uLqAAAAANzrMkUw2rdvnxo0aKCjR48qLCxMkjR27FgVLlxYixcvVmhoqIsrBAAAAHAvyxSr0vXp00ehoaE6fPiwNm/erM2bN+vQoUMqVqyY+vTp4+ryAAAAANzjMsWI0cqVK7V+/XqHpblz5cqlcePGqUaNGi6sDAAAAIAVZIpg5O3trXPnzqVoj4+Pl5eXlwsqwn91bf6Quj7zsIoWuBpedx84pjc++VFL1+6SJHl7eWhc/2ZqXu9+eXt56JffdqvvG1/p+Kn/
/bsWzhekd19pqVpVSik+4ZIiv9+gYe9/p6SkZJe8JiA9nDp5XJGfva+tG9fp0qWLylegkJ4fMEKhYeGSpA2rf9Uvi+bpwN49ij93Vm9+FKmQEmEurhrIeHNmRWrGtM918uQJlQorrcGvDFO58uVdXRZwV3EeZC2ZYirdk08+qW7dumnDhg0yxsgYo/Xr16tHjx5q1KiRq8uDpKP/ntGw9xcqos1bqtHmba3Y+Je+eaebyhTPJ0l6a8DTaljzPrV5+XM93mWS8ucJ1JwJXezHu7nZNP+95+Xl6aFHO0xQ1+Ez1bbRgxr+fENXvSTgjsWfi9PwFzvL3d1DQ954VxM/+1rPde+nbNkD7PtcupigsPsqqnWXF1xYKXB3LfnxB41/a6y69+ylOd8sUFhYaT3fvbNiY2NdXRpw13AeZD2ZIhi99957Cg0NVfXq1eXj4yMfHx/VqFFDJUqU0Lvvvuvq8iDph1V/6qc1u7T/0AntO3RcIyd/r/gLl1S1fDEF+PuoQ5PqGjRxvlZu+ktbdh9WtxFfqnrFUFUtFyJJqlu9jMoUz6dOr87Q9r+OaunaXRr94WJ1b1FTnh6sPIis6buvZihXnrzqOXCESpS+T8H5C6pClWrKV6CQfZ+ajzXUM891VbnKVV1YKXB3zZwxTc2eaaEmTZ9WaIkSGjpilHx8fPTt/HmuLg24azgPsp5MEYxy5MihhQsXKioqSnPnztXcuXMVFRWlBQsWONz0FZmDm5tNzevdr2y+XtqwPVqVyhSRl6eHfl0fZd/nr4P/6lDMKT1Yvpgk6cHyxfTnvn8cptb9vG63ArP7Kjw0/11/DUB6+P23VSpeqowmjh6krs0f06AerbXshwWuLgtwqcuJidq9a6eqVY+wt7m5ualatQht37bFhZUBdw/nQdaUKa4xWrNmjR566CGVLFlSJUuWdOrYS5cu6dKlSw5tJjlJNjdGIdJb2RIFtGLGS/Lx8lB8wiW1fOlT7TlwTBVKFdKlxMs6G5/gsP/x2DjlzXV1SlHeXAE6Hut4HdnxU3FXt+UOkKIEZDnHY47q5+/nqeHTbdS0dUftj9qlaZPHy8PDU7Uef9LV5QEucfrMaSUlJSlXrlwO7bly5VJ09AEXVQXcXZwHWVOmGDGqXbu2ihUrpldeeUW7du1y6tixY8cqMDDQ4XHl3z8yqFJr++vgv3rw2bGq2W68Pv1mjT4d/ZxK//81RoAVJZtkFStZWq0691KxEqVVt2Ez1WnQRD8vYpoEAABZTaYIRv/8849eeuklrVy5Uvfdd58qVqyot99+W0eOHLntsUOGDNHZs2cdHh55778LVVvP5StJOnD4pLbsPqzh73+nHX8dVa9Wj+hYbJy8vTwV6O/rsH9wrgD9G3t1VOjf2DgF58ruuD3n1dGkf0/G3Z0XAKSzoJy5VbBIMYe2gkWK6eTxYy6qCHC9oBxBcnd3T3GBeWxsrHLnzu2iqoC7i/Mga8oUwSh37tzq3bu31q5dq/3796t58+aaMWOGQkJCVLt27Vse6+3trYCAAIcH0+juDjebTd5eHtqy+5ASL1/Row/+bwnikkWDVSR/Tm3YHi1J2rA9WveVKKA8Qf72fepUK62z5xK0+wAfIpE1hZWtoJgjfzu0xRz5W3nyct0crMvTy0tlwstqw/rf7G3JycnasOE3la9QyYWVAXcP50HWlCmuMbpesWLFNHjwYFWoUEHDhg3TypUrXV0SJI1+oZF+WrtTh2NOK3s2H7V8oopqVimpp3p+qLj4i5r+7W9686VmOnX2vM6dv6iJg5pr/bYD2rjjoCTpl992a/eBY/p8THu9+u63ypsrQCN6PakpX69S4uUrrn1xQBo1eLq1hvftpAWzpqp6rce0L2qnlv2wQF1ffNW+T3zcWZ08fkynY09Ikv75/yCVI2cu5cjJXw1xb3qufUcNe2WQypa9T/eVK68vZ85QQkKCmjRt5urSgLuG8yDrsRljjKuLuGbt2rWKjIzU3LlzdfHi
RTVu3Fht2rRR/fr1nerHt1LvDKrQuj4a0VqPVg1TvtwBOht/UX/uPaoJ037Rrxv2SPrfDV5b1P//G7yu262+Y7/Sv9ctuFAkf5DefeVZ1by/pM5fvKTI7zdq6HsLucFrBvlt4VhXl2AJf6xfrdmff6BjRw8rT74CevKZNqrToKl9+4qfvtdH40elOO6Z57qqebvud7NUSypdIPvtd0KGmB35pf3GlmGly2jQK0NVvnwFV5cF3FWcB5mDTyqHgjJFMBoyZIjmzJmjf/75R4899pjatGmjxo0by8/PL039EYwAghEgEYwAAKkPRpliKt2qVas0cOBAtWjRggvSAAAAANx1mSIYjR07VhEREfLwcCznypUrWrdunWrWrOmiygAAAABYQaZYle7RRx/VqVOnUrSfPXtWjz76qAsqAgAAAGAlmSIYGWNks9lStMfGxipbtmwuqAgAAACAlbh0Kl2zZleXK7TZbOrQoYO8vb3t25KSkrR9+3ZFRES4qjwAAAAAFuHSYBQYGCjp6ohR9uzZ5evra9/m5eWlatWqqWvXrq4qDwAAAIBFuCwY9e/fXx988IGyZcumgwcP6rPPPpO/v7+rygEAAABgYS67xuj9999XfHy8pKvLdV+4cMFVpQAAAACwOJeNGIWEhOi9997T448/LmOMfvvtNwUFBd1wX5brBgAAAJCRbMYY44on/vbbb9WjRw8dP35cNptNNyvDZrMpKSnJqb59K/VOjxKBLO23hWNdXQLgcqULZHd1CQAAF/NJ5VCQy0aMmjRpoiZNmig+Pl4BAQGKiopScHCwq8oBAAAAYGEuXZVOkvz9/bV8+XIVK1ZMHh4uLwcAAACABWWKG7zWqlVLf//9t4YOHapWrVrp+PHjkqQff/xRO3fudHF1AAAAAO51mSIYrVy5UuXKldOGDRs0f/58+2p127Zt04gRI1xcHQAAAIB7XaYIRoMHD9aYMWP0888/y8vLy95eu3ZtrV+/3oWVAQAAALCCTBGMduzYoaZNm6ZoDw4O1smTJ11QEQAAAAAryRTBKEeOHIqJiUnRvmXLFhUsWNAFFQEAAACwkkwRjJ599lkNGjRIx44dk81mU3JystauXasBAwaoXbt2ri4PAAAAwD0uUwSjN954Q6VLl1bhwoUVHx+v8PBwPfzww4qIiNDQoUNdXR4AAACAe5zNGGNcXcQ1hw8f1o4dOxQfH69KlSqpZMmSaerHt1LvdK4MyHp+WzjW1SUALle6QHZXlwAAcDGfVN4q1WV3VO3fv/8tt1+/Gt3EiRMzuhwAAAAAFuayYLRly5ZU7Wez2TK4EgAAAABW57JgtHz5clc9NQAAAAA4yBSLLwAAAACAKxGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFiezRhjXF1Eevt66z+uLgFwufZD57u6BMDlTn/b29UlAABczMcjdfsxYgQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyP
YAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8ghGAAAAACyPYAQAAADA8jJNMFq9erXatm2r6tWr6+jRo5KkmTNnas2aNS6uDAAAAMC9LlMEo3nz5qlevXry9fXVli1bdOnSJUnS2bNn9cYbb7i4OgAAAAD3ukwRjMaMGaOPP/5Yn376qTw9Pe3tNWrU0ObNm11YGQAAAAAryBTBKCoqSjVr1kzRHhgYqDNnztz9ggAAAABYSqYIRvny5dO+fftStK9Zs0bFixd3QUUAAAAArCRTBKOuXbuqb9++2rBhg2w2m/755x9FRkZqwIABev75511dHgAAAIB7nIerC5CkwYMHKzk5WXXq1NGFCxdUs2ZNeXt7a8CAAXrhhRdcXR4AAACAe5zNGGNcXcQ1iYmJ2rdvn+Lj4xUeHi5/f/809fP11n/SuTIg62k/dL6rSwBc7vS3vV1dAgDAxXxSORSUKabSderUSefOnZOXl5fCw8NVtWpV+fv76/z58+rUqZOrywMAAABwj8sUwWjGjBlKSEhI0Z6QkKAvvvjCBRUBAAAAsBKXXmMUFxcnY4yMMTp37px8fHzs25KSkvTDDz8oODjYhRUCAAAAsAKXBqMcOXLIZrPJZrOpVKlSKbbbbDaNGjXKBZUBAAAAsBKXBqPly5fLGKPatWtr3rx5ypkzp32bl5eXihYtqgIFCriwQgAAAABW4NJgVKtWLUlSdHS0ChcuLDe3THHJEwAAAACLyRT3MSpatKjOnDmjzz//XLt375YklS1bVp06dVJgYKCLqwMAAABwr8sUQzS///67QkND9c477+jUqVM6deqUJk6cqNDQUG3evNnV5QEAAAC4x2WKEaN+/fqpUaNG+vTTT+XhcbWkK1euqEuXLnrxxRe1atUqF1cIAAAA4F6WKYLR77//7hCKJMnDw0Mvv/yyqlSp4sLKAAAAAFhBpphKFxAQoEOHDqVoP3z4sLJnz+6CigAAAABYSaYIRi1btlTnzp311Vdf6fDhwzp8+LDmzJmjLl26qFWrVq4uDwAAAMA9LlNMpRs/frxsNpvatWunK1euSJI8PT31/PPPa9y4cS6uDgAAAMC9zmaMMa4u4poLFy5o//79kqTQ0FD5+fmlqZ+vt/6TnmUBWVL7ofNdXQLgcqe/7e3qEgAALuaTyqGgTDFi9OWXX6pZs2by8/NTuXLlXF0ObuLgrm1a8/1X+if6L507HatWA15T+AMP2bcbY/TrN9P0+7LFung+XkXC7lOjLv2UK38hSdLp48e0Yv4XOvDnFsWfOaXsOXOrwkN1VatZW3l4eLrqZQGpNqD5/WpSvbhKFQpSQuIVbdh9TK9OX6e9R8847Pdg6Xwa+Vw1PRCWV0nJRtsPnNBTw7/TxcQkSVLF0Dwa06G67i95dfu36/Zr0GdrdP7iZRe8KiBjzJkVqRnTPtfJkydUKqy0Br8yTOXKl3d1WcBdxXmQtWSKa4z69eun4OBgtW7dWj/88IOSkpJcXRJuIPHSReUrGqonO/W94fbV383R+h/nq1GXfur++ofy8vHRjDde1uXEREnSyX8OySQbNe7aXy9MmKYn2vXUpl++1y+zP7ubLwNIs4fvK6CPF+9QrQFz9eSwhfLwcNOi1xrJz/t/f2N6sHQ+LRz1lJZtOaSH+3+jh/p9rY8X7VBy8tXB+fw5s2nxmMbaH3NWNV/6Ro1HfKfwIjn1ab86rnpZQLpb8uMPGv/WWHXv2UtzvlmgsLDSer57Z8XGxrq6NOCu4TzIejJFMIqJidGcOXNks9nUokUL5c+fX7169dK6detcXRquU6rSg6r7bGeFV304xTZjjH77Ya5qNXtOZR54SPmKhurpXkN07vRJ7d60RpJUsmJVNes5SCUqPKCceQuo
TJUaeujJFtq1cfXdfilAmjQe8b2+XLZHuw+d0o7oWHV75xcVCQ5QpRLB9n3e6vKQPvx+u8bP3azdh05p79EzmrdmnxKvJEuSnnggRJevJOvFj1Zq79Ez+mPvcb0weYWa1iih4vkDXfXSgHQ1c8Y0NXumhZo0fVqhJUpo6IhR8vHx0bfz57m6NOCu4TzIejJFMPLw8NCTTz6pyMhIHT9+XO+8844OHjyoRx99VKGhoa4uD6lw+niM4s+cUmi5++1tPn7+KlSijA7v3XnT4y5eOC9ff5ZkR9YUkM1bknQ6/qIkKU+gr6qWzqcTZxK0/O2ndXBmJy0d21QR4fntx3h7uuvylSRdf3VnQuLVRWeu3w/Iqi4nJmr3rp2qVj3C3ubm5qZq1SK0fdsWF1YG3D2cB1lTpghG1/Pz81O9evX0xBNPqGTJkjp48KCrS0IqxJ85JUnyDwxyaM8WGGTf9l+xx45q/ZIFeqDuUxleH5DebDbp7a4Pa93Of7Tr76vv8WL5AiRJr7auqqk/7VLjEd9p6/4T+uH1JgotcHU0aMX2I8ob5Kd+zSrJ08NNObJ5a0yH6pKkfDmzuebFAOno9JnTSkpKUq5cuRzac+XKpZMnT7qoKuDu4jzImjJNMLpw4YIiIyPVoEEDFSxYUJMmTVLTpk21c+fNRxsk6dKlS4qLi3N4XE68dJeqRlrFnTqhL954WfdVq6UqdZ50dTmA0yY9X0tli+ZUu7d+sre52WySpM+X/KmZv+zWtgMn9fJna/TXkdNq/1i4JGn3oVPq+s4y9WlaUafm9dDBLzvp4LE4HTt9XiY50ywSCgCA5WSKVemeffZZLVq0SH5+fmrRooWGDRum6tWrp+rYsWPHatSoUQ5tz3Tvr+Y9XsqIUnET/jlySpLiz55W9qD//XXk/NnTyhdSwmHfuFMnNXV0fxUuVVaNuvHvhKznnR411eCBENUdPF9HY8/b22NOX/169yHHUdKow6dVOI+//fuvVv6lr1b+peAcvjp/8YqMMerTpKKij529Oy8AyEBBOYLk7u6e4gLz2NhY5c6d20VVAXcX50HWlClGjNzd3fX1118rJiZGH3zwQapDkSQNGTJEZ8+edXg06cR9K+62oOD88s+RUwd2bLa3XbxwXkf27VbhkmXtbXGnTmjq6H4qUKyUmvUcJDe3TPEWBFLtnR411ah6cdV/9Vv9/e85h21//3tO/8TGq1QhxymlJQrm0KHjjvtK0vEzCTp/8bKeqVlSFy8nadnWwxlaO3A3eHp5qUx4WW1Y/5u9LTk5WRs2/KbyFSq5sDLg7uE8yJoyxYhRvXr1VKdOHbm7uzu0JyYmas6cOWrXrt1Nj/X29pa3t7dDm6dXfIbUaXWXLibo1LGj9u/PHI9RzMF98vXPrhy586p6g2e0YsFM5cxfUEHB+bXsq6nKHpRbZf7/Xkdxp07o81H9lCN3XtV/rofOx/3vr+PZ/3/ECcjMJj1fSy1rlVLzMYsVf+Gy8ua4ehPqsxcu2e9R9M68LRrapqp2RJ/UtgMn1bZOaYUVClLrsT/a++nxZDmt331M8QmXVadSYb3RMULDZvyms+cTXfK6gPT2XPuOGvbKIJUte5/uK1deX86coYSEBDVp2szVpQF3DedB1mMzxrh8Uru7u7tiYmIUHBzs0B4bG6vg4GCn72v09dZ/0rM8/L/onVs1dXS/FO2VatVTs56D/3eD118W6eKFeBUJK6enOr+o3AUKS5I2r1iiBR+9ecO+X/tqeYbWbkXth853dQn3nIRFNx6N7vrOL/py2R779wOeqazuDcspKLuPdkSf1KvT1mndrhj79s/611X9KiHy9/VU1JHTmjR/i2Yvj8rw+q3o9LfMIHCV2ZFf2m9sGVa6jAa9MlTly1dwdVnAXcV5kDn4pHIoKE3BaPXq1ZoyZYr279+vuXPnqmDBgpo5c6aKFSumhx56yNnu5Obmpn///Vd58uRxaN+2bZseffRRnTp141XNboZgBBCM
AIlgBABIfTByeirdvHnz9Nxzz6lNmzbasmWLLl26ugLc2bNn9cYbb+iHH35IdV+VKlWSzWaTzWZTnTp15OHxv3KSkpIUHR2t+vXrO1siAAAAADjF6WA0ZswYffzxx2rXrp3mzJljb69Ro4bGjBnjVF9NmjSRJG3dulX16tWTv///Vm3y8vJSSEiInn76aWdLBAAAAACnOB2MoqKiVLNmzRTtgYGBOnPmjFN9jRgxQpIUEhKili1bysfHx9lyAAAAAOCOOR2M8uXLp3379ikkJMShfc2aNSpevHiaimjfvr2kq6vQHT9+XMnJyQ7bixQpkqZ+AQAAACA1nA5GXbt2Vd++fTV16lTZbDb9888/+u233zRgwAANGzYsTUXs3btXnTp10rp16xzajTGy2WxOr0oHAAAAAM5wOhgNHjxYycnJqlOnji5cuKCaNWvK29tbAwYM0AsvvJCmIjp06CAPDw8tWrRI+fPnl81mS1M/AAAAAJAWab6PUWJiovbt26f4+HiFh4c7LJzgrGzZsumPP/5Q6dKl09zH9ViuG2C5bkBiuW4AQOqX63ZztuNOnTrp3Llz8vLyUnh4uKpWrSp/f3+dP39enTp1crY7SVJ4eLhOnjyZpmMBAAAA4E45HYxmzJihhISEFO0JCQn64osvUt1PXFyc/fHmm2/q5Zdf1ooVKxQbG+uwLS4uztkSAQAAAMApqb7GKC4uTsYYGWN07tw5h6W1k5KS9MMPPyg4ODjVT5wjRw6Ha4mMMapTp47DPiy+AAAAAOBuSHUwuhZkbDabSpUqlWK7zWbTqFGjUv3Ey5cvT/W+AAAAAJCRUh2Mli9fLmOMateurXnz5ilnzpz2bV5eXipatKgKFCiQ6ieuVauWc5UCAAAAQAZJdTC6FmSio6NVuHBhubk5fXnSTW3fvv2G7TabTT4+PipSpIi8vb3T7fkAAAAA4HpO38eoaNGiOnPmjD7//HPt3r1bklS2bFl16tRJgYGBaSqiYsWKt7x3kaenp1q2bKkpU6Y4XNsEAAAAAOnB6WGf33//XaGhoXrnnXd06tQpnTp1ShMnTlRoaKg2b96cpiIWLFigkiVL6pNPPtHWrVu1detWffLJJwoLC9OsWbP0+eef69dff9XQoUPT1D8AAAAA3IrTN3h9+OGHVaJECX366afy8Lg64HTlyhV16dJFBw4c0KpVq5wuomrVqnrttddUr149h/affvpJw4YN08aNG/Xtt9/qpZde0v79+2/bHzd4BbjBKyBxg1cAQOpv8Or0VLrff//dIRRJkoeHh15++WVVqVLF2e4kSTt27FDRokVTtBctWlQ7duyQdHW6XUxMTJr6BwAAAIBbcXoqXUBAgA4dOpSi/fDhw8qePXuaiihdurTGjRunxMREe9vly5c1btw4lS5dWpJ09OhR5c2bN039AwAAAMCtOD1i1LJlS3Xu3Fnjx49XRESEJGnt2rUaOHCgWrVqlaYiJk+erEaNGqlQoUIqX768pKujSElJSVq0aJEk6cCBA+rZs2ea+gcAAACAW3E6GI0fP142m03t2rXTlStXJF1dNe7555/XuHHj0lRERESEoqOjFRkZqb/++kuS1Lx5c7Vu3do+CvXcc8+lqW8AAAAAuJ1UL75QpUoVdenSRa1bt1ZAQIAuXLhgXwghNDRUfn5+GVqoM1h8AWDxBUBi8QUAQAYsvlChQgW9/PLLeumll/T000+rU6dOeuSRR9JYnvTdd9/piSeekKenp7777rtb7tuoUaM0Pw8AAAAA3I5Ty3VfuHBBX3/9taZPn67Vq1erWLFi6tSpk9q3b6+CBQs69cRubm46duyYgoOD5eZ28zUgbDabkpKSnOqbESOAESNAYsQIAJD6ESOnVqXz8/NThw4dtGLFCv3111969tlnNWXKFIWEhKhhw4aaPz/1H8SSk5MVHBysy5cv65FHHtGePXuUnJyc4uFsKAIAAAAAZzm9XPc1oaGhGjNmjA4ePKjZs2dr/fr1
at68udP9eHp6aseOHbccNQIAAACAjHRHaWTFihXq0KGDOnTooKSkJHXt2jVN/bRt21afffbZnZQCAAAAAGnm9HLdR44c0fTp0zV9+nQdOHBADz/8sD788EM1b95cvr6+aSriypUrmjp1qn755Rfdf//9ypYtm8P2iRMnpqlfAAAAAEiNVAejr7/+WlOnTtWyZcsUHBys9u3bq1OnTipRosQdF/Hnn3+qcuXKkmS/j9E1NpvtjvsHAAAAgFtJdTBq27atGjZsqAULFqhBgwbpek3Q8uXL060vAAAAAHBWqoPRkSNHFBwcnJG1AAAAAIBLpHrYh1AEAAAA4F7FGtkAAAAALI9gBAAAAMDyCEYAAAAALM/p+xhdk5iYqOPHjys5OdmhvUiRIndcFAAAAADcTU4Ho71796pTp05at26dQ7sxRjabTUlJSelWHAAAAADcDU4How4dOsjDw0OLFi1S/vz5uQErAAAAgCzP6WC0detW/fHHHypdunRG1AMAAAAAd53Tiy+Eh4fr5MmTGVELAAAAALiE08HozTff1Msvv6wVK1YoNjZWcXFxDg8AAAAAyGqcnkpXt25dSVKdOnUc2ll8AQAAAEBW5XQwWr58eUbUAQAAAAAu43QwqlWrVkbUAQAAAAAuk+YbvF64cEGHDh1SYmKiQ3v58uXvuCgAAAAAuJucDkYnTpxQx44d9eOPP95wO9cYAQAAAMhqnF6V7sUXX9SZM2e0YcMG+fr6asmSJZoxY4ZKliyp7777LiNqBAAAAIAM5fSI0a+//qqFCxeqSpUqcnNzU9GiRfXYY48pICBAY8eOVcOGDTOiTgAAAADIME6PGJ0/f17BwcGSpKCgIJ04cUKSVK5cOW3evDl9qwMAAACAu8DpYBQWFqaoqChJUoUKFTRlyhQdPXpUH3/8sfLnz5/uBQIAAABARnN6Kl3fvn0VExMjSRoxYoTq16+vyMhIeXl5afr06eldHwAAAABkOKeDUdu2be1f33///fr777+1Z88eFSlSRLlz507X4gAAAADgbkjzfYyu8fPzU+XKldOjFgAAAABwCaeDkTFGc+fO1fLly3X8+HElJyc7bJ8/f366FQcAAAAAd4PTwejFF1/UlClT9Oijjypv3ryy2WwZURcAAAAA3DVOB6OZM2dq/vz5atCgQUbUAwAAAAB3ndPLdQcGBqp48eIZUQsAAAAAuITTwWjkyJEaNWqUEhISMqIeAAAAALjrnJ5K16JFC82ePVvBwcEKCQmRp6enw/bNmzenW3EAAAAAcDc4HYzat2+vP/74Q23btmXxBQAAAAD3BKeD0eLFi/XTTz/poYceyoh6AAAAAOCuc/oao8KFCysgICAjagEAAAAAl3A6GE2YMEEvv/yyDh48mAHlAAAAAMDdZzPGGGcOCAoK0oULF3TlyhX5+fmlWHzh1KlT6VpgWly84uoKAAAAAGQGPqm8eMjpa4wmTZrk7CEAAAAAkKk5PWKUFTBiBAAAAEDKwBGjQ4cO3XJ7kSJFnO0SAAAAAFzK6REjNze3W967KCkp6Y6LulOMGAEAAACQMnDEaMuWLQ7fX758WVu2bNHEiRP1+uuvO9sdAAAAALhcul1jtHjxYr399ttasWJFenR3RxgxAgAAACClfsTI6fsY3UxYWJg2bdqUXt0BAAAAwF3j9FS6uLg4h++NMYqJidHIkSNVsmTJdCsMAAAAAO4Wp4NRjhw5Uiy+YIxR4cKFNWfOnHQrDAAAAADuFqeD0fLlyx2+d3NzU548eVSiRAl5eDjdHQAAAAC4HDd4BQAAAHDPSvflur/77rtU7deoUaPUdgkAAAAAmUKqR4zc3BwXsLPZbPrvoTabjRu8AgAAAMg00n257uTkZIeHn5+f9u3b59CWGUIRAAAAADgr3e5jBAAAAABZFcEIAAAAgOURjAAAAABYXpqDkc1mS3GjVwAAAADIilK9Kl1QUJBDEDpz5owCAgJSrFZ36tSp9K0wDViVDgAAAICU
AfcxmjRpUhpLAQAAAIDMLdUjRlkJI0YAAAAApAy4jxEAAAAA3KsIRgAAAAAsj2AEAAAAwPIIRgAAAAAsj2AEAAAAwPJSvVz39Y4cOaLvvvtOhw4dUmJiosO2iRMnpkthAAAAAHC3OB2Mli1bpkaNGql48eLas2eP7rvvPh08eFDGGFWuXDkjagQAAACADOX0VLohQ4ZowIAB2rFjh3x8fDRv3jwdPnxYtWrVUvPmzTOiRgAAAADIUE7f4DV79uzaunWrQkNDFRQUpDVr1qhs2bLatm2bGjdurIMHD2ZQqanHDV4BAAAASBl4g9ds2bLZryvKnz+/9u/fb9928uRJZ7sDAAAAAJdz+hqjatWqac2aNSpTpowaNGigl156STt27ND8+fNVrVq1jKgRAAAAADKU01PpDhw4oPj4eJUvX17nz5/XSy+9pHXr1qlkyZKaOHGiihYtmlG1phpT6QAAAABIqZ9K53QwygoIRgAAAACk1AejNN3HSJISExN1/PhxJScnO7QXKVIkrV0CAAAAgEs4HYz++usvde7cWevWrXNoN8bIZrMpKSkp3YoDAAAAgLvB6WDUsWNHeXh4aNGiRcqfP79sNltG1AUAAAAAd43T1xhly5ZNf/zxh0qXLp1RNd0xrjECAAAAIGXgfYzCw8O5XxEAAACAe4rTwejNN9/Uyy+/rBUrVig2NlZxcXEODwAAAADIapyeSufmdjVL/ffaosy0+AJT6QAAAABIGbhc9/Lly509BAAAAAAyNW7wCgAAAOCelWGLL0jS6tWr1bZtW0VEROjo0aOSpJkzZ2rNmjVp6Q4AAAAAXMrpYDRv3jzVq1dPvr6+2rx5sy5duiRJOnv2rN544410LxAAAAAAMprTwWjMmDH6+OOP9emnn8rT09PeXqNGDW3evDldiwMAAACAu8HpYBQVFaWaNWumaA8MDNSZM2fSVMTMmTNVo0YNFShQQH///bckadKkSVq4cGGa+gMAAAAAZzgdjPLly6d9+/alaF+zZo2KFy/udAEfffSR+vfvrwYNGujMmTP25b5z5MihSZMmOd0fAAAAADjL6WDUtWtX9e3bVxs2bJDNZtM///yjyMhIDRgwQM8//7zTBbz//vv69NNP9eqrr8rd3d3eXqVKFe3YscPp/gAAAADAWU7fx2jw4MFKTk5WnTp1dOHCBdWsWVPe3t4aMGCAXnjhBacLiI6OVqVKlVK0e3t76/z58073BwAAAADOcioYJSUlae3aterVq5cGDhyoffv2KT4+XuHh4fL3909TAcWKFdPWrVtVtGhRh/YlS5aoTJkyaeoTAAAAAJzhVDByd3fX448/rt27dytHjhwKDw+/4wL69++vXr166eLFizLGaOPGjZo9e7bGjh2rzz777I77BwAAAIDbcXoq3X333acDBw6oWLFi6VJAly5d5Ovrq6FDh+rChQtq3bq1ChQooHfffVfPPvtsujwHAAAAANyKzRhjnDlgyZIlGjJkiF577TXdf//9ypYtm8P2gICANBdz4cIFxcfHKzg4OM19SNLFK3d0OAAAAIB7hE8qh4KcDkZubv9byM5ms9m/NsbIZrPZl9tOrdq1a2v+/PnKkSOHQ3tcXJyaNGmiX3/91an+JIIRAAAAgKtSG4ycnkq3fPlyZw+5pRUrVigxMTFF+8WLF7V69ep0fS4AAAAAuBGng1GtWrVuuu3PP/9MdT/bt2+3f71r1y4dO3bM/n1SUpKWLFmiggULOlseAAAAADjN6al0/3Xu3DnNnj1bn332mf74449UT6Vzc3OzT8W7UQm+vr56//331alTJ6drYiodAAAAACkDp9Jds2rVKn3++eeaN2+eChQooGbNmmny5MmpPj46OlrGGBUvXlwbN25Unjx57Nu8vLwUHBwsd3f3tJYHAAAAAKnmVDA6duyYpk+frs8//1xxcXFq0aKFLl26pG+//dbpexpdu6FrcnKyU8cBAAAAQHpzu/0uVz311FMKCwvT9u3b
NWnSJP3zzz96//3306WI/fv364UXXlDdunVVt25d9enTR/v370+XvgEAAADgdlIdjH788Ud17txZo0aNUsOGDdNtmttPP/2k8PBwbdy4UeXLl1f58uW1YcMGlS1bVj///HO6PAfurjmzIvXEY7X1QKVyavNsc+24bqENwCo4D2B1nAMA50FWk+pgtGbNGp07d07333+/HnzwQX3wwQc6efLkHRcwePBg9evXTxs2bNDEiRM1ceJEbdiwQS+++KIGDRp0x/3j7lry4w8a/9ZYde/ZS3O+WaCwsNJ6vntnxcbGuro04K7hPIDVcQ4AnAdZkdOr0p0/f15fffWVpk6dqo0bNyopKUkTJ05Up06dlD17dqcL8PHx0Y4dO1SyZEmH9r/++kvly5fXxYsXne6TVelcp82zzVX2vnJ6ZehwSVevIXu8Ti21av2cOnft5uLqgLuD8wBWxzkAcB5kJqldlS7VI0bXZMuWTZ06ddKaNWu0Y8cOvfTSSxo3bpyCg4PVqFEjZ7tTnjx5tHXr1hTtW7duVXBwsNP9wXUuJyZq966dqlY9wt7m5uamatUitH3bFhdWBtw9nAewOs4BgPMgq3I6GF0vLCxMb731lo4cOaLZs2enqY+uXbuqW7duevPNN7V69WqtXr1a48aNU/fu3dW1a9fbHn/p0iXFxcU5PC5dupSmWnBnTp85raSkJOXKlcuhPVeuXOky7RLICjgPYHWcAwDnQVaV5vsYXc/d3V1NmjRRkyZNnD522LBhyp49uyZMmKAhQ4ZIkgoUKKCRI0eqT58+tz1+7NixGjVqlEPbq8NGaOjwkU7XAgAAAMCa0iUY3QmbzaZ+/fqpX79+OnfunCQ5da3SkCFD1L9/f4c24+6drjUidYJyBMnd3T3FRYWxsbHKnTu3i6oC7i7OA1gd5wDAeZBV3dFUuvQwZswYRUdHS7oaiJxdwMHb21sBAQEOD29vgpEreHp5qUx4WW1Y/5u9LTk5WRs2/KbyFSq5sDLg7uE8gNVxDgCcB1mVy4PRN998oxIlSigiIkIffvgh8y6zuOfad9T8uV/ru28X6MD+/RozeqQSEhLUpGkzV5cG3DWcB7A6zgGA8yArcnq57oywc+dORUZGas6cOTpy5Igee+wxtWnTRk2aNJGfn5/T/bFct2vNjvxSM6Z9rpMnTyisdBkNemWoypev4OqygLuK8wBWxzkAcB5kFqldrjtTBKPrrV27VrNmzdI333yjixcvKi4uzuk+CEYAAAAApAy8j1FGy5Ytm3x9feXl5aXLly+7uhwAAAAAFpApglF0dLRef/11lS1bVlWqVNGWLVs0atQoHTt2zNWlAQAAALAAly/XXa1aNW3atEnly5dXx44d1apVKxUsWNDVZQEAAACwEJcHozp16mjq1KkKDw93dSkAAAAALCrTLL6QmJio6OhohYaGysPjzvIaiy8AAAAAkLLQ4gsJCQnq3Lmz/Pz8VLZsWR06dEiS9MILL2jcuHEurg4AAACAFbg8GA0ePFjbtm3TihUr5OPjY2+vW7euvvrqKxdWBgAAAMAqXH6N0bfffquvvvpK1apVk81ms7eXLVtW+/fvd2FlAAAAAKzC5SNGJ06cUHBwcIr28+fPOwQlAAAAAMgoLg9GVapU0eLFi+3fXwtDn332mapXr+6qsgAAAABYiMun0r3xxht64okntGvXLl25ckXvvvuudu3apXXr1mnlypWuLg8AAACABbh8xOihhx7S1q1bdeXKFZUrV05Lly5VcHCwfvvtN91///2uLg8AAACABWSa+xilJ+5jBAAAAEBK/X2MXDKVLi4uTgEBAfavb+XafgAAAACQUVwyYuTu7q6YmBgFBwfLzc3thqvPGWNks9mUlJTkdP+MGAEAAACQMvmI0a+//qqcOXNKkqZNm6bChQvL3d3dYZ/k5GQdOnTIFeUBAAAAsBiXX2N0/ejR9WJjYxUcHMyIEQAAAIA0S+2IkctXpbs2Ze6/4uPj5ePj
44KKAAAAAFiNy+5j1L9/f0lXb+g6bNgw+fn52bclJSVpw4YNqlixoouqAwAAAGAlLgtGW7ZskXR1xGjHjh3y8vKyb/Py8lKFChU0YMAAV5UHAAAAwEJcfo1Rx44d9e6776brstxcYwQAAABASv01Ri4PRhmBYAQAAABAykKLLwAAAACAqxGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5RGMAAAAAFgewQgAAACA5WW6YHTx4kVXlwAAAADAYjJFMEpOTtZrr72mggULyt/fXwcOHJAkDRs2TJ9//rmLqwMAAABwr8sUwWjMmDGaPn263nrrLXl5ednb77vvPn322WcurAwAAACAFWSKYPTFF1/ok08+UZs2beTu7m5vr1Chgvbs2ePCygAAAABYQaYIRkePHlWJEiVStCcnJ+vy5csuqAgAAACAlWSKYBQeHq7Vq1enaJ87d64qVarkgooAAAAAWImHqwuQpOHDh6t9+/Y6evSokpOTNX/+fEVFRemLL77QokWLXF0eAAAAgHuczRhjXF2EJK1evVqjR4/Wtm3bFB8fr8qVK2v48OF6/PHHne7r4pUMKBAAAABAluOTyqGgTBOM0hPBCAAAAICU+mCUKa4xKl68uGJjY1O0nzlzRsWLF3dBRQAAAACsJFMEo4MHDyopKSlF+6VLl3T06FEXVAQAAADASly6+MJ3331n//qnn35SYGCg/fukpCQtW7ZMISEhLqgMAAAAgJW49BojN7erA1Y2m03/LcPT01MhISGaMGGCnnzySaf65RojAAAAAFLqrzFy6YhRcnKyJKlYsWLatGmTcufO7cpyAAAAAFgUq9IBAAAAuGdlqVXpJGnlypV66qmnVKJECZUoUUKNGjXS6tWrXV0WAAAAAAvIFMHoyy+/VN26deXn56c+ffqoT58+8vX1VZ06dTRr1ixXlwcAAADgHpcpptKVKVNG3bp1U79+/RzaJ06cqE8//VS7d+92qj+m0gEAAACQUj+VLlMEI29vb+3cuVMlSpRwaN+3b5/uu+8+Xbx40an+CEYAAAAApCx2jVHhwoW1bNmyFO2//PKLChcu7IKKAAAAAFiJS5frvuall15Snz59tHXrVkVEREiS1q5dq+nTp+vdd991cXUAAAAA7nWZYiqdJC1YsEATJkywX09UpkwZDRw4UI0bN3a6L6bSAQAAAJCy2DVG6Y1gBAAAAEDKYtcYdenSRStWrHB1GQAAAAAsKlMEoxMnTqh+/foqXLiwBg4cqK1bt7q6JAAAAAAWkmmm0p0+fVrffPONZs2apdWrV6t06dJq06aNWrdurZCQEKf6YiodAAAAACmLX2N05MgRzZ49W1OnTtXevXt15YpzSYdgBAAAAEDKYtcYXe/y5cv6/ffftWHDBh08eFB58+Z1dUkAAAAA7nGZJhgtX75cXbt2Vd68
edWhQwcFBARo0aJFOnLkiKtLAwAAAHCPyxRT6QoWLKhTp06pfv36atOmjZ566il5e3unuT+m0gEAAACQstg1Rp9++qmaN2+uHDly3HK/I0eOqECBAnJzu/VAF8EIAAAAgJTFglFqBQQEaOvWrSpevPgt9yMYAQAAAJCy8OILt5KFMhwAAACALCRLBSMAAAAAyAgEIwAAAACWRzACAAAAYHlZKhjZbDZXlwAAAADgHpSlghGLLwAAAADICFlque7Dhw+rQIECcnd3v+V+LNcNAAAAQMoC9zFq1qxZqvedP3++U30TjFxrzqxIzZj2uU6ePKFSYaU1+JVhKle+vKvLAu4qzgNYHecAwHmQWWT6+xgFBgbaHwEBAVq2bJl+//13+/Y//vhDy5YtU2BgoKtKRBos+fEHjX9rrLr37KU53yxQWFhpPd+9s2JjY11dGnDXcB7A6jgHAM6DrChTTKUbNGiQTp06pY8//tg+TS4pKUk9e/ZUQECA3n77baf6Y8TIddo821xl7yunV4YOlyQlJyfr8Tq11Kr1c+rctZuLqwPuDs4DWB3nAMB5kJlk+hGj602dOlUDBgxwuHbI3d1d/fv319SpU11YGZxxOTFRu3ftVLXqEfY2Nzc3VasWoe3btriwMuDu4TyA1XEOAJwHWVWmCEZXrlzRnj17UrTv2bNHycnJtzz20qVLiouLc3hcunQpo0rFLZw+c1pJSUnKlSuXQ3uuXLl08uRJF1UF3F2cB7A6zgGA8yCryhTBqGPHjurcubMmTpyoNWvWaM2aNZowYYK6dOmijh073vLYsWPHOlyvFBgYqLffHHuXKgcAAABwL0jljLuMNX78eOXLl08TJkxQTEyMJCl//vwaOHCgXnrppVseO2TIEPXv39+hzbh7Z1ituLmgHEFyd3dPcVFhbGyscufO7aKqgLuL8wBWxzkAcB5kVZlixMjNzU0vv/yyjh49qjNnzujMmTM6evSoXn755dves8jb21sBAQEOD29vgpEreHp5qUx4WW1Y/5u9LTk5WRs2/KbyFSq5sDLg7uE8gNVxDgCcB1lVphgxul5AQICrS8AdeK59Rw17ZZDKlr1P95Urry9nzlBCQoKaNE39fauArI7zAFbHOQBwHmRFmSYYzZ07V19//bUOHTqkxMREh22bN292UVVwVv0nGuj0qVP68IP3dPLkCYWVLqMPp3ymXAwbw0I4D2B1nAMA50FWlCnuY/Tee+/p1VdfVYcOHfTJJ5+oY8eO2r9/vzZt2qRevXrp9ddfd6o/7mMEAAAAQEr9fYwyRTAqXbq0RowYoVatWil79uzatm2bihcvruHDh+vUqVP64IMPnOqPYAQAAABAymI3eD106JAiIq7eAMvX11fnzp2TJD333HOaPXu2K0sDAAAAYAGZIhjly5dPp06dkiQVKVJE69evlyRFR0crEwxoAQAAALjHZYpgVLt2bX333XeSrt7stV+/fnrsscfUsmVLNW3a1MXVAQAAALjXZYprjJKTk5WcnCwPj6sTAOfMmaN169apZMmS6t69u7y8vJzqj2uMAAAAAEhZbPGF9EYwAgAAACClPhhlmvsYnT59Wp9//rl2794tSQoPD1fHjh2VM2dOF1cGAAAA4F6XKUaMVq1apUaNGikgIEBVqlSRJP3xxx86c+aMvv/+e9WsWdOp/hgxAgAAACBlsal05cqVU/Xq1fXRRx/J3d1dkpSUlKSePXtq3bp12rFjh1P9EYwAAAAASFksGPn6+mrr1q0KCwtzaI+KilLFihWVkJDgVH8EIwAAAABSFrvBa+XKle3XFl1v9+7dqlChggsqAgAAAGAlLlt8Yfv27fav+/Tpo759+2rfvn2qVq2aJGn9+vWaPHmyxo0b56oSAQAAAFiEy6bSubm5yWaz6XZPb7PZlJSU5FTfTKUDAAAAIGWB5bqjo6Nd9dQAAAAA4CBTLL6Q3hgxAgAAACBlgRGj/9q7d6+WL1+u48ePKzk52WHb8OHD
XVQVAAAAACvIFCNGn376qZ5//nnlzp1b+fLlk81ms2+z2WzavHmzU/0xYgQAAABAymL3MSpatKh69uypQYMGpUt/BCMAAAAAUha7j9Hp06fVvHlzV5cBAAAAwKIyRTBq3ry5li5d6uoyAAAAAFiUy6bSvffee/avz58/r4kTJ6phw4YqV66cPD09Hfbt06ePU30zlQ4AAACAlAWuMSpWrFiq9rPZbDpw4IBTfROMAAAAAEhZIBhlJIIRAAAAACmLLb4wevRoXbhwIUV7QkKCRo8e7YKKAAAAAFhJphgxcnd3V0xMjIKDgx3aY2NjFRwcrKSkJKf6Y8QIAAAAgJTFRoyMMQ43db1m27ZtypkzpwsqAgAAAGAlqcxPGSMoKEg2m002m02lSpVyCEdJSUmKj49Xjx49XFghAAAAACtw6VS6GTNmyBijTp06adKkSQoMDLRv8/LyUkhIiKpXr+50v0ylAwAAACBlsVXpVq5cqYiIiBT3L0orghEAAAAAKYsFI+nq1Llvv/1Wu3fvliSVLVtWjRo1kru7u9N9EYwAAAAASFksGO3bt08NGjTQ0aNHFRYWJkmKiopS4cKFtXjxYoWGhjrVH8EIAAAAgJTFglGDBg1kjFFkZKR9FbrY2Fi1bdtWbm5uWrx4sVP9EYwAAAAASFksGGXLlk3r169XuXLlHNq3bdumGjVqKD4+3qn+CEYAAAAApCx2HyNvb2+dO3cuRXt8fLy8vLxcUBEAAAAAK8kUwejJJ59Ut27dtGHDBhljZIzR+vXr1aNHDzVq1MjV5QEAAAC4x2WKqXRnzpxR+/bt9f3339uX7L58+bIaN26s6dOnO9zfKDWYSgcAAABAymLXGF2zb98+7dq1S5IUHh6uEiVKpKkfghEAAAAAKfXBKJW7ZbzPP/9c77zzjvbu3StJKlmypF588UV16dLFxZUBAAAAuNdlimA0fPhwTZw4US+88IKqV68uSfrtt9/Ur18/HTp0SKNHj3ZxhQAAAADuZZliKl2ePHn03nvvqVWrVg7ts2fP1gsvvKCTJ0861R9T6QAAAABIWWy57suXL6tKlSop2u+//35duULKAQAAAJCxMkUweu655/TRRx+laP/kk0/Upk0bF1QEAAAAwEpcdo1R//797V/bbDZ99tlnWrp0qapVqyZJ2rBhgw4dOqR27dq5qkQAAAAAFuGya4weffTRVO1ns9n066+/OtU31xgBAAAAkLLofYzSC8EIAAAAgJTFFl8AAAAAAFciGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMsjGAEAAACwPIIRAAAAAMuzGWOMq4vAveXSpUsaO3ashgwZIm9vb1eXA7gE5wHAeQBInAdZCcEI6S4uLk6BgYE6e/asAgICXF0O4BKcBwDnASBxHmQlTKUDAAAAYHkEIwAAAACWRzACAAAAYHkEI6Q7b29vjRgxggsMYWmcBwDnASBxHmQlLL4AAAAAwPIYMQIAAABgeQQjAAAAAJZHMAIAAABgeQQjC3rkkUf04osvpnr/b7/9ViVKlJC7u7tTxwGZmc1m07fffpvq/VesWCGbzaYzZ85kWE1AZsH7HZmRs59fUiskJESTJk266faDBw/KZrNp69at6f7cHTp0UJMmTdK9X6QNwQi31b17dz3zzDM6fPiwXnvttQw7iadPn64cOXKke7/AjcTExOiJJ55I1z5HjhypihUrpmufEh9SkfH++4EzIiJCMTExCgwMvGs1ZOSHT0Dic4bk/B8FrcbD1QUgc4uPj9fx48dVr149FShQwNXlAOkiMTFR+fLlc3UZQKbl5eXFOQLAchgxsrhLly5pwIABKliwoLJly6YHH3xQK1askHT1r9TZs2eXJNWuXVs2m02PPPKIZsyYoYULF8pm
s8lms2nFihWKiIjQoEGDHPo+ceKEPD09tWrVKknS6dOn1a5dOwUFBcnPz09PPPGE9u7da3+ujh076uzZs/Z+R44cedsagdR45JFH1Lt3b7344ovKnTu36tWrl+KvZuvWrVPFihXl4+OjKlWq6Ntvv73hX6//+OMPValSRX5+foqIiFBUVJSkq3+JHDVqlLZt22Z/D0+fPl2tW7dWy5YtHfq4fPmycufOrS+++ELS1fd4nz59FBwcLB8fHz300EPatGmTpKt/RX/00UclSUFBQbLZbOrQoYMkKTk5WWPHjlWxYsXk6+urChUqaO7cuRnwE8S9rEOHDlq5cqXeffddh/fuf0cp16xZo4cffli+vr4qXLiw+vTpo/Pnz0uSXnnlFT344IMp+q5QoYJGjx4t6er7dfTo0SpUqJC8vb1VsWJFLVmyxL5vsWLFJEmVKlWy/39zzWeffaYyZcrIx8dHpUuX1ocffpgBPwlkBVeuXFHv3r0VGBio3Llza9iwYbp255m0fs6QpAsXLqhTp07Knj27ihQpok8++STFc+/Zs0cRERHy8fHRfffdp5UrV9q3JSUlqXPnzvbfx2FhYXr33Xcdjk9KSlL//v2VI0cO5cqVSy+//LJSe9ecTz75RAUKFFBycrJDe+PGjdWpUyf79x999JFCQ0Pl5eWlsLAwzZw5074tJCREktS0aVPZbDb795K0cOFCVa5cWT4+PipevLhGjRqlK1eupKq2e4qB5dSqVcv07dvXGGNMly5dTEREhFm1apXZt2+fefvtt423t7f566+/zKVLl0xUVJSRZObNm2diYmLM2bNnTYsWLUz9+vVNTEyMiYmJMZcuXTIffPCBKVKkiElOTrY/z/vvv+/Q1qhRI1OmTBmzatUqs3XrVlOvXj1TokQJk5iYaC5dumQmTZpkAgIC7P2eO3futjUCqVGrVi3j7+9vBg4caPbs2WP27NljJJkFCxYYY4w5e/asyZkzp2nbtq3ZuXOn+eGHH0ypUqWMJLNlyxZjjDHLly83ksyDDz5oVqxYYXbu3GkefvhhExERYYwx5sKFC+all14yZcuWtb+HL1y4YBYtWmR8fX3t72djjPn++++Nr6+viYuLM8YY06dPH1OgQAHzww8/mJ07d5r27duboKAgExsba65cuWLmzZtnJJmoqCgTExNjzpw5Y4wxZsyYMaZ06dJmyZIlZv/+/WbatGnG29vbrFix4u79cJHlnTlzxlSvXt107drV/t795ZdfjCRz+vRpY4wx+/btM9myZTPvvPOO+euvv8zatWtNpUqVTIcOHYwxxvz5559Gktm3b5+932tte/fuNcYYM3HiRBMQEGBmz55t9uzZY15++WXj6elp/12+ceNGI8n88ssvJiYmxsTGxhpjjPnyyy9N/vz5zbx588yBAwfMvHnzTM6cOc306dPv4k8JmcG13+V9+/Y1e/bsMV9++aXx8/Mzn3zyiTEm7Z8zihYtanLmzGkmT55s9u7da8aOHWvc3NzMnj17jDHGREdHG0mmUKFCZu7cuWbXrl2mS5cuJnv27ObkyZPGGGMSExPN8OHDzaZNm8yBAwfstX311Vf2+t98800TFBRk5s2bZ3bt2mU6d+5ssmfPbho3bnzb137q1Cnj5eVlfvnlF3tbbGysQ9v8+fONp6enmTx5somKijITJkww7u7u5tdffzXGGHP8+HEjyUybNs3ExMSY48ePG2OMWbVqlQkICDDTp083+/fvN0uXLjUhISFm5MiRd/gvlvUQjCzoWjD6+++/jbu7uzl69KjD9jp16pghQ4YYY4w5ffq0kWSWL19u396+ffsUJ/Hx48eNh4eHWbVqlb2tevXqZtCgQcYYY/766y8jyaxdu9a+/eTJk8bX19d8/fXXxhhjpk2bZgIDAx36TU2NwO3UqlXLVKpUyaHt+mD00UcfmVy5cpmEhAT79k8//fSGwej6/5QWL15sJNmPGzFihKlQoYLD81y+fNnkzp3bfPHF
F/a2Vq1amZYtWxpjjImPjzeenp4mMjLSvj0xMdEUKFDAvPXWWw7Pfe1DqjHGXLx40fj5+Zl169Y5PF/nzp1Nq1atnPjpAI5/MDMm5Xuuc+fOplu3bg7HrF692ri5udnf/xUqVDCjR4+2bx8yZIh58MEH7d8XKFDAvP766w59PPDAA6Znz57GmP99+Lx2zl0TGhpqZs2a5dD22muvmerVq6fptSLrqlWrlilTpozDH2EHDRpkypQpk+bPGcZcDUZt27a1f5+cnGyCg4PNRx99ZIz533tz3Lhx9n0uX75sChUqZN58882b1turVy/z9NNP27/Pnz+//ff69X2kJhgZY0zjxo1Np06d7N9PmTLFFChQwCQlJRljjImIiDBdu3Z1OKZ58+amQYMG9u+v/7/vmjp16pg33njDoW3mzJkmf/78qarrXsJUOgvbsWOHkpKSVKpUKfn7+9sfK1eu1P79+53qK0+ePHr88ccVGRkpSYqOjtZvv/2mNm3aSJJ2794tDw8Ph6kWuXLlUlhYmHbv3n1XaoS13X///TfdFhUVpfLly8vHx8feVrVq1RvuW758efvX+fPnlyQdP378pn17eHioRYsW9nPj/PnzWrhwof3c2L9/vy5fvqwaNWrYj/H09FTVqlVveW7s27dPFy5c0GOPPeZwbnzxxRecG0h327Zt0/Tp0x3ea/Xq1VNycrKio6MlSW3atNGsWbMkScYYzZ492/4+j4uL0z///OPwPpekGjVq3PJ9fv78ee3fv1+dO3d2eO4xY8bwPreoatWqyWaz2b+vXr269u7dq127dqXpc8Y11/9ut9lsypcvX4rf7dWrV7d/7eHhoSpVqjj0PXnyZN1///3KkyeP/P399cknn+jQoUOSpLNnzyomJsahvmt9pFabNm00b948Xbp0SZIUGRmpZ599Vm5uVz/O79692+lzTLp6fo8ePdrhHOvatatiYmJ04cKFVNd3L2DxBQuLj4+Xu7u7/vjjD7m7uzts8/f3d7q/Nm3aqE+fPnr//fc1a9YslStXTuXKlctUNcK6smXLli79eHp62r++9p/zf+d8/1ebNm1Uq1YtHT9+XD///LN8fX1Vv379O6ojPj5ekrR48WIVLFjQYZu3t/cd9Q38V3x8vLp3764+ffqk2FakSBFJUqtWrTRo0CBt3rxZCQkJOnz4cIrr69LyvJL06aefpriG6b//JwB34vrf7dLV3++3+91+vTlz5mjAgAGaMGGCqlevruzZs+vtt9/Whg0b0q3Gp556SsYYLV68WA888IBWr16td9555477jY+P16hRo9SsWbMU267/g6EVEIwsrFKlSkpKStLx48f18MMPp/o4Ly8vJSUlpWhv3LixunXrpiVLlmjWrFlq166dfVuZMmV05coVbdiwQREREZKk2NhYRUVFKTw8/Kb9prVGwBlhYWH68ssvdenSJXuouLb4gTNudm5ERESocOHC+uqrr/Tjjz+qefPm9v+Er10ku3btWhUtWlTS1cUZNm3aZF8+2cvLS5Ic+g4PD5e3t7cOHTqkWrVqOV0rcL2bvXevqVy5snbt2qUSJUrcdJ9ChQqpVq1aioyMVEJCgh577DEFBwdLkgICAlSgQAGtXbvW4f26du1a++jsjd7nefPmVYECBXTgwAH76BOs7b9BY/369SpZsqTCw8PT9DnDGevXr1fNmjUlXV0E4o8//lDv3r0lXX0vR0REqGfPnvb9rx/VDAwMVP78+bVhw4YUfVSuXDlVz+/j46NmzZopMjJS+/btU1hYmMOxZcqU0dq1a9W+fXt729q1a+2vX7oaAP/7M6hcubKioqJueX5bBcHIwkqVKqU2bdqoXbt2mjBhgipVqqQTJ05o2bJlKl++vBo2bHjD40JCQvTTTz8pKipKuXLlUmBgoDw9PZUtWzY1adJEw4YN0+7du9WqVSv7MSVLllTjxo3VtWtXTZkyRdmzZ9fgwYNVsGBBNW7c2N5vfHy8li1bpgoVKsjP
zy/NNQLOaN26tV599VV169ZNgwcP1qFDhzR+/HhJcpiycTshISGKjo7W1q1bVahQIWXPnt0etFq3bq2PP/5Yf/31l5YvX24/Jlu2bHr++ec1cOBA5cyZU0WKFNFbb72lCxcuqHPnzpKkokWLymazadGiRWrQoIF8fX2VPXt2DRgwQP369VNycrIeeughnT17VmvXrlVAQIDDf4zA7YSEhGjDhg06ePCg/P39U/ylfNCgQapWrZp69+6tLl26KFu2bNq1a5d+/vlnffDBB/b92rRpoxEjRigxMTHFX7IHDhyoESNGKDQ0VBUrVtS0adO0detW+zTT4OBg+fr6asmSJSpUqJB8fHwUGBioUaNGqU+fPgoMDFT9+vV16dIl/f777zp9+rT69++f8T8cZCqHDh1S//791b17d23evFnvv/++JkyYkObPGX5+fql+7smTJ6tkyZIqU6aM3nnnHZ0+fdq+IlzJkiX1xRdf6KefflKxYsU0c+ZMbdq0yb7aoiT17dtX48aNU8mSJVW6dGlNnDjR6fvTtWnTRk8++aR27typtm3bOmwbOHCgWrRooUqVKqlu3br6/vvvNX/+fP3yyy/2fUJCQrRs2TLVqFFD3t7eCgoK0vDhw/Xkk0+qSJEieuaZZ+Tm5qZt27bpzz//1JgxY5yqL8tz9UVOuPuuv8j22ioqISEhxtPT0+TPn980bdrUbN++3Rhz48UXjh8/bh577DHj7++fYtsPP/xgJJmaNWumeN5Tp06Z5557zgQGBhpfX19Tr169FCvL9ejRw+TKlctIMiNGjEhVjcDt/PfCcmNSXoC6du1aU758eePl5WXuv/9+M2vWLCPJvirRjRZA2LJli5FkoqOjjTFXF0R4+umnTY4cOewr/1yza9cuI8kULVrU4cJhY4xJSEgwL7zwgsmdO7fx9vY2NWrUMBs3bnTYZ/To0SZfvnzGZrOZ9u3bG2OuXiA8adIkExYWZjw9PU2ePHlMvXr1zMqVK+/o5wXriYqKMtWqVTO+vr729+5/3+8bN260/+7Pli2bKV++fIrFFE6fPm28vb2Nn5+fw0qMxhiTlJRkRo4caQoWLGg8PT1NhQoVzI8//uiwz6effmoKFy5s3NzcTK1ateztkZGRpmLFisbLy8sEBQWZmjVrmvnz56f7zwGZW61atUzPnj1Njx49TEBAgAkKCjKvvPKK/XdqWj9nFC1a1LzzzjsO+1WoUMG+/driC7NmzTJVq1Y1Xl5eJjw83L7amzFXf/936NDBBAYGmhw5cpjnn3/eDB482GFBnsuXL5u+ffuagIAAkyNHDtO/f3/Trl27VC++YMzV8yh//vxGktm/f3+K7R9++KEpXry48fT0NKVKlXJY+McYY7777jtTokQJ4+HhYYoWLWpvX7JkiYmIiDC+vr4mICDAVK1a1b7an5XYjEnlAuoAYCGRkZH2e174+vq6uhwAAJDBmEoHAJK++OILFS9eXAULFtS2bds0aNAgtWjRglAEAIBFEIwAQNKxY8c0fPhwHTt2TPnz51fz5s31+uuvu7osAIAFHDp0yGGRhP/atWuXfQVIZBym0gEAAAAudOXKFR08ePCm20NCQuThwXhGRiMYAQAAALA8N1cXAAAAAACuRjACAAAAYHkEIwAAAACWRzACAAAAYHkEIwAAAACWRzACAKTZsWPH1LdvX5UoUUI+Pj7KmzevatSooY8++kgXLlyw7xcSEiKbzSabzaZs2bKpcuXK+uabb1Jsu9GjQ4cOt6xh7dq18vDwUMWKFe1tTz31lOrXr3/D/VevXi2bzabt27en2FauXDn16NHjhsfNnDlT3t7eOnny5G1+KpLNZtO333572/0AAJkHwQgAkCYHDhxQpUqVtHTpUr3xxhvasmWLfvvtN7388statGiRfvnlF4f9R48erZiYGG3ZskUPPPCAWrZsqXXr1mnTpk2KiYlRTEyM5s2bJ0mKioqyt7377rs3reHMmTNq166d6tSp49De
uXNn/fzzzzpy5EiKY6ZNm6YqVaqofPnyKbZ17txZc+bMUUJCwg2Pa9SokXLnzp2qnw8AIGshGAEA0qRnz57y8PDQ77//rhYtWqhMmTIqXry4GjdurMWLF+upp55y2D979uzKly+fSpUqpcmTJ8vX11fff/+98uTJo3z58ilfvnzKmTOnJCk4ONjeFhgYeNMaevToodatW6t69eoO7U8++aTy5Mmj6dOnO7THx8frm2++UefOnW/YX9u2bZWQkGAPaNdER0drxYoV9uM++ugjhYaGysvLS2FhYZo5c6Z935CQEElS06ZNZbPZ7N9L0sKFC1W5cmX5+PioePHiGjVqlK5cuXLT1wcAuHsIRgAAp8XGxmrp0qXq1auXsmXLdsN9bDbbTY/38PCQp6enEhMT01zDtGnTdODAAY0YMeKG/bdr107Tp0/X9fcx/+abb5SUlKRWrVrdsM/cuXOrcePGmjp1qkP79OnTVahQIT3++ONasGCB+vbtq5deekl//vmnunfvro4dO2r58uWSpE2bNtnri4mJsX+/evVqtWvXTn379tWuXbs0ZcoUTZ8+Xa+//nqafwYAgPRDMAIAOG3fvn0yxigsLMyhPXfu3PL395e/v78GDRp0w2MTExM1duxYnT17VrVr107T8+/du1eDBw/Wl19+KQ8Pjxvu06lTJ+3fv18rV660t02bNk1PP/30LUehOnfurBUrVig6OlqSZIzRjBkz1L59e7m5uWn8+PHq0KGDevbsqVKlSql///5q1qyZxo8fL0nKkyePJClHjhzKly+f/ftRo0Zp8ODBat++vYoXL67HHntMr732mqZMmZKmnwEAIH0RjAAA6Wbjxo3aunWrypYtq0uXLjlsGzRokPz9/eXn56c333xT48aNU8OGDW/b57Wg5e/vrx49eigpKUmtW7fWqFGjVKpUqZseV7p0aUVERNhHf/bt26fVq1ffdBrdNY899pgKFSqkadOmSZKWLVumQ4cOqWPHjpKk3bt3q0aNGg7H1KhRQ7t3775lv9u2bdPo0aMdXk/Xrl0VExPjsFAFAMA1bvxnNgAAbqFEiRKy2WyKiopyaC9evLgkydfXN8UxAwcOVIcOHeTv76+8efPecqrd9bZu3Wr/OiAgQOfOndPvv/+uLVu2qHfv3pKk5ORkGWPk4eGhpUuX2keiOnfurBdeeEGTJ0/WtGnTFBoaqlq1at3y+dzc3NShQwfNmDFDI0eO1LRp0/Too4/aX1taxcfHa9SoUWrWrFmKbT4+PnfUNwDgzjFiBABwWq5cufTYY4/pgw8+0Pnz51N1TO7cuVWiRAnly5cv1aFIuhrCrj2Cg4MVEBCgHTt2aOvWrfZHjx49FBYWpq1bt+rBBx+0H9uiRQu5ublp1qxZ+uKLL9SpU6dUPXfHjh11+PBhzZ8/XwsWLHAYZSpTpozWrl3rsP/atWsVHh5u/97T01NJSUkO+1SuXFlRUVEOr+faw82N/44BwNUYMQIApMmHH36oGjVqqEqVKho5cqTKly8vNzc3bdq0SXv27NH999+fIc/r5uam++67z6EtODhYPj4+Kdr9/f3VsmVLDRkyRHFxcbe9J9I1xYoVU+3atdWtWzd5e3s7jPIMHDhQLVq0UKVKlVS3bl19//33mj9/vsPy5CEhIVq2bJlq1Kghb29vBQUFafjw4XryySdVpEgRPfPMM3Jzc9O2bdv0559/asyYMWn/gQAA0gV/ogIApEloaKi2bNmiunXrasiQIapQoYKqVKmi999/XwMGDNBrr73m6hIlXZ1Od/r0adWrV08FChRw+rjWrVs7THVr0qSJ3n33XY0fP15ly5bVlClTNG3aND3yyCP2fSZMmKCff/5ZhQsXVqVKlSRJ9erV06JFi7R06VI98MADqlatmt555x0VLVo03V4rACDtbOb6dUwBAAAAwIIYMQIAAABgeQQjAAAAAJZHMAIAAABgeQQjAAAAAJZHMAIAAABgeQQjAAAAAJZHMAIA
AABgeQQjAAAAAJZHMAIAAABgeQQjAAAAAJZHMAIAAABgef8HWA2Jk61Jt7wAAAAASUVORK5CYII=",
112
+ "text/plain": [
113
+ "<Figure size 1000x700 with 1 Axes>"
114
+ ]
115
+ },
116
+ "metadata": {},
117
+ "output_type": "display_data"
118
+ }
119
+ ],
120
+ "source": [
121
+ "import seaborn as sns\n",
122
+ "import matplotlib.pyplot as plt\n",
123
+ "from sklearn.metrics import confusion_matrix\n",
124
+ "import pandas as pd\n",
125
+ "\n",
126
+ "# Assuming df is your DataFrame\n",
127
+ "\n",
128
+ "# True labels and predictions\n",
129
+ "y_true = filtered_df[\"type\"]\n",
130
+ "y_pred = filtered_df[\"gpt_vote\"]\n",
131
+ "\n",
132
+ "# Compute the confusion matrix\n",
133
+ "conf_matrix = confusion_matrix(y_true, y_pred, labels=[\"leftvote\", \"rightvote\", \"tievote\", \"bothbad_vote\"])\n",
134
+ "\n",
135
+ "# Create a pandas DataFrame from the confusion matrix\n",
136
+ "conf_matrix_df = pd.DataFrame(conf_matrix, index=[\"leftvote\", \"rightvote\", \"tievote\", \"bothbad_vote\"], columns=[\"leftvote\", \"rightvote\", \"tievote\", \"bothbad_vote\"])\n",
137
+ "\n",
138
+ "# Plotting the heatmap\n",
139
+ "plt.figure(figsize=(10, 7))\n",
140
+ "sns.heatmap(conf_matrix_df, annot=True, fmt=\"d\", cmap=\"Blues\", cbar=False)\n",
141
+ "plt.title(\"Arena Human vs GPT-4V Confusion Matrix\")\n",
142
+ "plt.xlabel(\"GPT-4V Vote\")\n",
143
+ "plt.ylabel(\"Arena Human Vote\")\n",
144
+ "plt.show()\n"
145
+ ]
146
+ },
147
+ {
148
+ "cell_type": "code",
149
+ "execution_count": 46,
150
+ "metadata": {},
151
+ "outputs": [
152
+ {
153
+ "name": "stdout",
154
+ "output_type": "stream",
155
+ "text": [
156
+ "Accuracy: 0.5842911877394636\n",
157
+ "F1 Score (Macro): 0.514392348541452\n",
158
+ "F1 Score (Micro): 0.5842911877394636\n",
159
+ "F1 Score (Weighted): 0.5536668839130223\n"
160
+ ]
161
+ }
162
+ ],
163
+ "source": [
164
+ "from sklearn.metrics import accuracy_score, f1_score\n",
165
+ "\n",
166
+ "# Assuming df is your DataFrame and it contains 'type' as true labels and 'gpt_vote' as predictions\n",
167
+ "y_true = filtered_df['type']\n",
168
+ "y_pred = filtered_df['gpt_vote']\n",
169
+ "\n",
170
+ "# Calculate accuracy\n",
171
+ "accuracy = accuracy_score(y_true, y_pred)\n",
172
+ "print(f'Accuracy: {accuracy}')\n",
173
+ "\n",
174
+ "# Calculate F1 score, here using 'macro' average to treat all classes equally\n",
175
+ "f1 = f1_score(y_true, y_pred, average='macro')\n",
176
+ "print(f'F1 Score (Macro): {f1}')\n",
177
+ "\n",
178
+ "# If you want to calculate F1 score with other averages, for example 'micro' or 'weighted', you can do:\n",
179
+ "f1_micro = f1_score(y_true, y_pred, average='micro')\n",
180
+ "print(f'F1 Score (Micro): {f1_micro}')\n",
181
+ "\n",
182
+ "f1_weighted = f1_score(y_true, y_pred, average='weighted')\n",
183
+ "print(f'F1 Score (Weighted): {f1_weighted}')"
184
+ ]
185
+ },
186
+ {
187
+ "cell_type": "code",
188
+ "execution_count": null,
189
+ "metadata": {},
190
+ "outputs": [],
191
+ "source": []
192
+ },
193
+ {
194
+ "cell_type": "code",
195
+ "execution_count": 47,
196
+ "metadata": {},
197
+ "outputs": [
198
+ {
199
+ "name": "stdout",
200
+ "output_type": "stream",
201
+ "text": [
202
+ "Cohen's Kappa Score: 0.3442144615665177\n"
203
+ ]
204
+ }
205
+ ],
206
+ "source": [
207
+ "from sklearn.metrics import cohen_kappa_score\n",
208
+ "\n",
209
+ "# Assuming df is your DataFrame and it contains 'type' as true labels and 'gpt_vote' as predictions\n",
210
+ "y_true = filtered_df['type']\n",
211
+ "y_pred = filtered_df['gpt_vote']\n",
212
+ "\n",
213
+ "# Calculate Cohen's Kappa score\n",
214
+ "kappa = cohen_kappa_score(y_true, y_pred)\n",
215
+ "print(f'Cohen\\'s Kappa Score: {kappa}')\n"
216
+ ]
217
+ },
218
+ {
219
+ "cell_type": "code",
220
+ "execution_count": 48,
221
+ "metadata": {},
222
+ "outputs": [
223
+ {
224
+ "name": "stdout",
225
+ "output_type": "stream",
226
+ "text": [
227
+ "Accuracy Score: 0.5842911877394636\n"
228
+ ]
229
+ }
230
+ ],
231
+ "source": [
232
+ "from sklearn.metrics import accuracy_score\n",
233
+ "accuracy = accuracy_score(y_true, y_pred)\n",
234
+ "print(f'Accuracy Score: {accuracy}')\n"
235
+ ]
236
+ },
237
+ {
238
+ "cell_type": "code",
239
+ "execution_count": 49,
240
+ "metadata": {},
241
+ "outputs": [
242
+ {
243
+ "name": "stdout",
244
+ "output_type": "stream",
245
+ "text": [
246
+ "Pearson Correlation Coefficient: 0.2880096104357029\n"
247
+ ]
248
+ }
249
+ ],
250
+ "source": [
251
+ "import pandas as pd\n",
252
+ "\n",
253
+ "# Assuming filtered_df is your DataFrame and it contains 'type' and 'gpt_vote' columns\n",
254
+ "# Convert 'type' and 'gpt_vote' to categorical codes\n",
255
+ "filtered_df['type_int'] = pd.factorize(filtered_df['type'])[0]\n",
256
+ "filtered_df['gpt_vote_int'] = pd.factorize(filtered_df['gpt_vote'])[0]\n",
257
+ "\n",
258
+ "# Now you can calculate Pearson correlation between these new integer columns\n",
259
+ "pearson_correlation = filtered_df['type_int'].corr(filtered_df['gpt_vote_int'])\n",
260
+ "print(f'Pearson Correlation Coefficient: {pearson_correlation}')\n"
261
+ ]
262
+ },
263
+ {
264
+ "cell_type": "code",
265
+ "execution_count": null,
266
+ "metadata": {},
267
+ "outputs": [],
268
+ "source": []
269
+ },
270
+ {
271
+ "cell_type": "code",
272
+ "execution_count": null,
273
+ "metadata": {},
274
+ "outputs": [],
275
+ "source": []
276
+ },
277
+ {
278
+ "cell_type": "code",
279
+ "execution_count": null,
280
+ "metadata": {},
281
+ "outputs": [],
282
+ "source": []
283
+ },
284
+ {
285
+ "cell_type": "code",
286
+ "execution_count": null,
287
+ "metadata": {},
288
+ "outputs": [],
289
+ "source": []
290
+ },
291
+ {
292
+ "cell_type": "code",
293
+ "execution_count": null,
294
+ "metadata": {},
295
+ "outputs": [],
296
+ "source": []
297
+ }
298
+ ],
299
+ "metadata": {
300
+ "kernelspec": {
301
+ "display_name": "otask",
302
+ "language": "python",
303
+ "name": "python3"
304
+ },
305
+ "language_info": {
306
+ "codemirror_mode": {
307
+ "name": "ipython",
308
+ "version": 3
309
+ },
310
+ "file_extension": ".py",
311
+ "mimetype": "text/x-python",
312
+ "name": "python",
313
+ "nbconvert_exporter": "python",
314
+ "pygments_lexer": "ipython3",
315
+ "version": "3.10.13"
316
+ },
317
+ "orig_nbformat": 4
318
+ },
319
+ "nbformat": 4,
320
+ "nbformat_minor": 2
321
+ }
arena_elo/get_latest_data.sh ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ # set LOGDIR to default if not set before
3
+ if [ -z "$LOGDIR" ]; then
4
+ export LOGDIR="./vision-arena-logs"
5
+ fi
6
+ mkdir -p results
7
+
8
+
9
+ # # for battle data
10
+ python -m elo_rating.clean_battle_data --model_infos_file "./model_infos.json" --mode conv_release
11
+ battle_cutoff_date=`cat cut_off_date.txt` && rm cut_off_date.txt && echo "Battle data last updated on $battle_cutoff_date"
12
+
13
+ mkdir -p ./results/latest
14
+ mkdir -p ./results/$battle_cutoff_date && mv ./clean_battle_conv_$battle_cutoff_date.json ./results/$battle_cutoff_date/clean_battle_conv.json
15
+ cp ./results/$battle_cutoff_date/clean_battle_conv.json ./results/latest/clean_battle_conv.json
16
+
17
+ echo "Battle data last updated on $battle_cutoff_date" >> ./results/latest/latest_updated_date.txt
arena_elo/pyproject.toml ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [build-system]
2
+ requires = ["setuptools>=61.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "arena_elo"
7
+ version = "0.2.35"
8
+ description = "Elo rating system for WildVision Bench Arena"
9
+ readme = "README.md"
10
+ requires-python = ">=3.9"
11
+ classifiers = [
12
+ "Programming Language :: Python :: 3",
13
+ "License :: OSI Approved :: Apache Software License",
14
+ ]
15
+ dependencies = [
16
+ "numpy", "prompt_toolkit>=3.0.0", "uvicorn","polyglot", "pyicu", "pycld2", "morfessor", "scikit-learn",
17
+ "pytz", "tqdm", "pandas", "plotly", "fire", "Pillow"
18
+ ]
19
+
20
+ [project.urls]
21
+ "Homepage" = "https://github.com/WildVision-Bench/Arena-Elo"
22
+ "Bug Tracker" = "https://github.com/WildVision-Bench/Arena-Elo/issues"
23
+
24
+ [tool.setuptools.packages.find]
25
+ exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]
26
+
27
+ [tool.wheel]
28
+ exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]
arena_elo/requirements.txt ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ -e git+https://github.com/WildVision-Bench/Arena-Elo.git@9dc2fa8543a2e9eda3d5bc01c2212fdfcdd4bfb5#egg=arena_elo
2
+ click==8.1.7
3
+ fire==0.5.0
4
+ h11==0.14.0
5
+ joblib==1.3.2
6
+ Morfessor==2.0.6
7
+ numpy==1.26.4
8
+ packaging==23.2
9
+ pandas==2.2.0
10
+ pillow==10.2.0
11
+ plotly==5.18.0
12
+ polyglot==16.7.4
13
+ prompt-toolkit==3.0.43
14
+ pycld2==0.41
15
+ PyICU==2.12
16
+ python-dateutil==2.8.2
17
+ pytz==2024.1
18
+ scikit-learn==1.4.0
19
+ scipy==1.12.0
20
+ six==1.16.0
21
+ tenacity==8.2.3
22
+ termcolor==2.4.0
23
+ threadpoolctl==3.2.0
24
+ tqdm==4.66.2
25
+ typing_extensions==4.9.0
26
+ tzdata==2024.1
27
+ uvicorn==0.27.1
28
+ wcwidth==0.2.13
arena_elo/results/20241224/clean_battle.json ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "model_a": "FLUX-1",
4
+ "model_b": "ChatDiT",
5
+ "winner": "model_b",
6
+ "judge": "arena_user_127.0.0.1",
7
+ "anony": true,
8
+ "tstamp": 1735030427.6669
9
+ },
10
+ {
11
+ "model_a": "FLUX-1",
12
+ "model_b": "ChatDiT",
13
+ "winner": "model_a",
14
+ "judge": "arena_user_127.0.0.1",
15
+ "anony": true,
16
+ "tstamp": 1735030452.0238
17
+ },
18
+ {
19
+ "model_a": "ChatDiT",
20
+ "model_b": "FLUX-1",
21
+ "winner": "model_a",
22
+ "judge": "arena_user_127.0.0.1",
23
+ "anony": true,
24
+ "tstamp": 1735030464.2602
25
+ },
26
+ {
27
+ "model_a": "ChatDiT",
28
+ "model_b": "FLUX-1",
29
+ "winner": "model_a",
30
+ "judge": "arena_user_127.0.0.1",
31
+ "anony": true,
32
+ "tstamp": 1735030476.2328
33
+ },
34
+ {
35
+ "model_a": "FLUX-1",
36
+ "model_b": "ChatDiT",
37
+ "winner": "tie (bothbad)",
38
+ "judge": "arena_user_127.0.0.1",
39
+ "anony": true,
40
+ "tstamp": 1735030495.2955
41
+ },
42
+ {
43
+ "model_a": "ChatDiT",
44
+ "model_b": "FLUX-1",
45
+ "winner": "tie (bothbad)",
46
+ "judge": "arena_user_127.0.0.1",
47
+ "anony": true,
48
+ "tstamp": 1735030503.418
49
+ },
50
+ {
51
+ "model_a": "ChatDiT",
52
+ "model_b": "FLUX-1",
53
+ "winner": "model_a",
54
+ "judge": "arena_user_127.0.0.1",
55
+ "anony": true,
56
+ "tstamp": 1735030511.3926
57
+ },
58
+ {
59
+ "model_a": "ChatDiT",
60
+ "model_b": "FLUX-1",
61
+ "winner": "tie (bothbad)",
62
+ "judge": "arena_user_127.0.0.1",
63
+ "anony": true,
64
+ "tstamp": 1735034259.9984
65
+ },
66
+ {
67
+ "model_a": "ChatDiT",
68
+ "model_b": "FLUX-1",
69
+ "winner": "model_a",
70
+ "judge": "arena_user_127.0.0.1",
71
+ "anony": true,
72
+ "tstamp": 1735034275.6871
73
+ },
74
+ {
75
+ "model_a": "ChatDiT",
76
+ "model_b": "FLUX-1",
77
+ "winner": "model_a",
78
+ "judge": "arena_user_127.0.0.1",
79
+ "anony": true,
80
+ "tstamp": 1735034284.7354
81
+ },
82
+ {
83
+ "model_a": "FLUX-1",
84
+ "model_b": "ChatDiT",
85
+ "winner": "model_a",
86
+ "judge": "arena_user_127.0.0.1",
87
+ "anony": true,
88
+ "tstamp": 1735034293.468
89
+ },
90
+ {
91
+ "model_a": "ChatDiT",
92
+ "model_b": "FLUX-1",
93
+ "winner": "model_b",
94
+ "judge": "arena_user_127.0.0.1",
95
+ "anony": true,
96
+ "tstamp": 1735034303.2042
97
+ },
98
+ {
99
+ "model_a": "ChatDiT",
100
+ "model_b": "FLUX-1",
101
+ "winner": "model_a",
102
+ "judge": "arena_user_127.0.0.1",
103
+ "anony": true,
104
+ "tstamp": 1735034314.1941
105
+ },
106
+ {
107
+ "model_a": "FLUX-1",
108
+ "model_b": "ChatDiT",
109
+ "winner": "model_a",
110
+ "judge": "arena_user_127.0.0.1",
111
+ "anony": true,
112
+ "tstamp": 1735034326.5092
113
+ },
114
+ {
115
+ "model_a": "FLUX-1",
116
+ "model_b": "ChatDiT",
117
+ "winner": "model_b",
118
+ "judge": "arena_user_127.0.0.1",
119
+ "anony": true,
120
+ "tstamp": 1735034331.6963
121
+ },
122
+ {
123
+ "model_a": "FLUX-1",
124
+ "model_b": "ChatDiT",
125
+ "winner": "tie (bothbad)",
126
+ "judge": "arena_user_127.0.0.1",
127
+ "anony": true,
128
+ "tstamp": 1735034336.5346
129
+ },
130
+ {
131
+ "model_a": "ChatDiT",
132
+ "model_b": "FLUX-1",
133
+ "winner": "model_b",
134
+ "judge": "arena_user_127.0.0.1",
135
+ "anony": true,
136
+ "tstamp": 1735034351.9521
137
+ },
138
+ {
139
+ "model_a": "FLUX-1",
140
+ "model_b": "ChatDiT",
141
+ "winner": "model_b",
142
+ "judge": "arena_user_127.0.0.1",
143
+ "anony": true,
144
+ "tstamp": 1735034366.1775
145
+ },
146
+ {
147
+ "model_a": "FLUX-1",
148
+ "model_b": "ChatDiT",
149
+ "winner": "model_a",
150
+ "judge": "arena_user_127.0.0.1",
151
+ "anony": true,
152
+ "tstamp": 1735034380.5877
153
+ },
154
+ {
155
+ "model_a": "ChatDiT",
156
+ "model_b": "FLUX-1",
157
+ "winner": "model_b",
158
+ "judge": "arena_user_127.0.0.1",
159
+ "anony": true,
160
+ "tstamp": 1735034384.3087
161
+ },
162
+ {
163
+ "model_a": "FLUX-1",
164
+ "model_b": "ChatDiT",
165
+ "winner": "model_a",
166
+ "judge": "arena_user_127.0.0.1",
167
+ "anony": true,
168
+ "tstamp": 1735034389.1583
169
+ },
170
+ {
171
+ "model_a": "FLUX-1",
172
+ "model_b": "ChatDiT",
173
+ "winner": "model_b",
174
+ "judge": "arena_user_127.0.0.1",
175
+ "anony": true,
176
+ "tstamp": 1735034405.9359
177
+ },
178
+ {
179
+ "model_a": "FLUX-1",
180
+ "model_b": "ChatDiT",
181
+ "winner": "model_b",
182
+ "judge": "arena_user_127.0.0.1",
183
+ "anony": true,
184
+ "tstamp": 1735034412.3533
185
+ },
186
+ {
187
+ "model_a": "FLUX-1",
188
+ "model_b": "ChatDiT",
189
+ "winner": "model_a",
190
+ "judge": "arena_user_127.0.0.1",
191
+ "anony": true,
192
+ "tstamp": 1735034419.0118
193
+ },
194
+ {
195
+ "model_a": "FLUX-1",
196
+ "model_b": "ChatDiT",
197
+ "winner": "model_b",
198
+ "judge": "arena_user_127.0.0.1",
199
+ "anony": true,
200
+ "tstamp": 1735034425.6972
201
+ },
202
+ {
203
+ "model_a": "FLUX-1",
204
+ "model_b": "ChatDiT",
205
+ "winner": "model_b",
206
+ "judge": "arena_user_127.0.0.1",
207
+ "anony": true,
208
+ "tstamp": 1735034432.5891
209
+ }
210
+ ]
arena_elo/results/20241224/elo_results.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73f08bd5db0423e0fec2bcc78b8d8153ff04a93d88cd4877136d08ba50ea3a84
3
+ size 44938
arena_elo/results/20241224/leaderboard.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ key,Model,Arena Elo rating (anony),Arena Elo rating (full),license,creator,link
2
+ ChatDiT,ChatDiT,1026.7303773982676,1026.7303773982676,-,Tongyi Lab,https://github.com/ali-vilab/ChatDiT
3
+ FLUX-1,FLUX-1,973.2696226017323,973.2696226017323,-,Black Forest Labs,https://www.basedlabs.ai/tools/flux1
arena_elo/results/20241226/clean_battle.json ADDED
@@ -0,0 +1,482 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "model_a": "FLUX-1",
4
+ "model_b": "ChatDiT",
5
+ "winner": "model_b",
6
+ "judge": "arena_user_127.0.0.1",
7
+ "anony": true,
8
+ "tstamp": 1735030427.6669
9
+ },
10
+ {
11
+ "model_a": "FLUX-1",
12
+ "model_b": "ChatDiT",
13
+ "winner": "model_a",
14
+ "judge": "arena_user_127.0.0.1",
15
+ "anony": true,
16
+ "tstamp": 1735030452.0238
17
+ },
18
+ {
19
+ "model_a": "ChatDiT",
20
+ "model_b": "FLUX-1",
21
+ "winner": "model_a",
22
+ "judge": "arena_user_127.0.0.1",
23
+ "anony": true,
24
+ "tstamp": 1735030464.2602
25
+ },
26
+ {
27
+ "model_a": "ChatDiT",
28
+ "model_b": "FLUX-1",
29
+ "winner": "model_a",
30
+ "judge": "arena_user_127.0.0.1",
31
+ "anony": true,
32
+ "tstamp": 1735030476.2328
33
+ },
34
+ {
35
+ "model_a": "FLUX-1",
36
+ "model_b": "ChatDiT",
37
+ "winner": "tie (bothbad)",
38
+ "judge": "arena_user_127.0.0.1",
39
+ "anony": true,
40
+ "tstamp": 1735030495.2955
41
+ },
42
+ {
43
+ "model_a": "ChatDiT",
44
+ "model_b": "FLUX-1",
45
+ "winner": "tie (bothbad)",
46
+ "judge": "arena_user_127.0.0.1",
47
+ "anony": true,
48
+ "tstamp": 1735030503.418
49
+ },
50
+ {
51
+ "model_a": "ChatDiT",
52
+ "model_b": "FLUX-1",
53
+ "winner": "model_a",
54
+ "judge": "arena_user_127.0.0.1",
55
+ "anony": true,
56
+ "tstamp": 1735030511.3926
57
+ },
58
+ {
59
+ "model_a": "ChatDiT",
60
+ "model_b": "FLUX-1",
61
+ "winner": "tie (bothbad)",
62
+ "judge": "arena_user_127.0.0.1",
63
+ "anony": true,
64
+ "tstamp": 1735034259.9984
65
+ },
66
+ {
67
+ "model_a": "ChatDiT",
68
+ "model_b": "FLUX-1",
69
+ "winner": "model_a",
70
+ "judge": "arena_user_127.0.0.1",
71
+ "anony": true,
72
+ "tstamp": 1735034275.6871
73
+ },
74
+ {
75
+ "model_a": "ChatDiT",
76
+ "model_b": "FLUX-1",
77
+ "winner": "model_a",
78
+ "judge": "arena_user_127.0.0.1",
79
+ "anony": true,
80
+ "tstamp": 1735034284.7354
81
+ },
82
+ {
83
+ "model_a": "FLUX-1",
84
+ "model_b": "ChatDiT",
85
+ "winner": "model_a",
86
+ "judge": "arena_user_127.0.0.1",
87
+ "anony": true,
88
+ "tstamp": 1735034293.468
89
+ },
90
+ {
91
+ "model_a": "ChatDiT",
92
+ "model_b": "FLUX-1",
93
+ "winner": "model_b",
94
+ "judge": "arena_user_127.0.0.1",
95
+ "anony": true,
96
+ "tstamp": 1735034303.2042
97
+ },
98
+ {
99
+ "model_a": "ChatDiT",
100
+ "model_b": "FLUX-1",
101
+ "winner": "model_a",
102
+ "judge": "arena_user_127.0.0.1",
103
+ "anony": true,
104
+ "tstamp": 1735034314.1941
105
+ },
106
+ {
107
+ "model_a": "FLUX-1",
108
+ "model_b": "ChatDiT",
109
+ "winner": "model_a",
110
+ "judge": "arena_user_127.0.0.1",
111
+ "anony": true,
112
+ "tstamp": 1735034326.5092
113
+ },
114
+ {
115
+ "model_a": "FLUX-1",
116
+ "model_b": "ChatDiT",
117
+ "winner": "model_b",
118
+ "judge": "arena_user_127.0.0.1",
119
+ "anony": true,
120
+ "tstamp": 1735034331.6963
121
+ },
122
+ {
123
+ "model_a": "FLUX-1",
124
+ "model_b": "ChatDiT",
125
+ "winner": "tie (bothbad)",
126
+ "judge": "arena_user_127.0.0.1",
127
+ "anony": true,
128
+ "tstamp": 1735034336.5346
129
+ },
130
+ {
131
+ "model_a": "ChatDiT",
132
+ "model_b": "FLUX-1",
133
+ "winner": "model_b",
134
+ "judge": "arena_user_127.0.0.1",
135
+ "anony": true,
136
+ "tstamp": 1735034351.9521
137
+ },
138
+ {
139
+ "model_a": "FLUX-1",
140
+ "model_b": "ChatDiT",
141
+ "winner": "model_b",
142
+ "judge": "arena_user_127.0.0.1",
143
+ "anony": true,
144
+ "tstamp": 1735034366.1775
145
+ },
146
+ {
147
+ "model_a": "FLUX-1",
148
+ "model_b": "ChatDiT",
149
+ "winner": "model_a",
150
+ "judge": "arena_user_127.0.0.1",
151
+ "anony": true,
152
+ "tstamp": 1735034380.5877
153
+ },
154
+ {
155
+ "model_a": "ChatDiT",
156
+ "model_b": "FLUX-1",
157
+ "winner": "model_b",
158
+ "judge": "arena_user_127.0.0.1",
159
+ "anony": true,
160
+ "tstamp": 1735034384.3087
161
+ },
162
+ {
163
+ "model_a": "FLUX-1",
164
+ "model_b": "ChatDiT",
165
+ "winner": "model_a",
166
+ "judge": "arena_user_127.0.0.1",
167
+ "anony": true,
168
+ "tstamp": 1735034389.1583
169
+ },
170
+ {
171
+ "model_a": "FLUX-1",
172
+ "model_b": "ChatDiT",
173
+ "winner": "model_b",
174
+ "judge": "arena_user_127.0.0.1",
175
+ "anony": true,
176
+ "tstamp": 1735034405.9359
177
+ },
178
+ {
179
+ "model_a": "FLUX-1",
180
+ "model_b": "ChatDiT",
181
+ "winner": "model_b",
182
+ "judge": "arena_user_127.0.0.1",
183
+ "anony": true,
184
+ "tstamp": 1735034412.3533
185
+ },
186
+ {
187
+ "model_a": "FLUX-1",
188
+ "model_b": "ChatDiT",
189
+ "winner": "model_a",
190
+ "judge": "arena_user_127.0.0.1",
191
+ "anony": true,
192
+ "tstamp": 1735034419.0118
193
+ },
194
+ {
195
+ "model_a": "FLUX-1",
196
+ "model_b": "ChatDiT",
197
+ "winner": "model_b",
198
+ "judge": "arena_user_127.0.0.1",
199
+ "anony": true,
200
+ "tstamp": 1735034425.6972
201
+ },
202
+ {
203
+ "model_a": "FLUX-1",
204
+ "model_b": "ChatDiT",
205
+ "winner": "model_b",
206
+ "judge": "arena_user_127.0.0.1",
207
+ "anony": true,
208
+ "tstamp": 1735034432.5891
209
+ },
210
+ {
211
+ "model_a": "ChatDiT",
212
+ "model_b": "FLUX-1",
213
+ "winner": "model_a",
214
+ "judge": "arena_user_127.0.0.1",
215
+ "anony": true,
216
+ "tstamp": 1735092762.0
217
+ },
218
+ {
219
+ "model_a": "FLUX-1",
220
+ "model_b": "ChatDiT",
221
+ "winner": "tie (bothbad)",
222
+ "judge": "arena_user_127.0.0.1",
223
+ "anony": true,
224
+ "tstamp": 1735092774.618
225
+ },
226
+ {
227
+ "model_a": "FLUX-1",
228
+ "model_b": "ChatDiT",
229
+ "winner": "model_a",
230
+ "judge": "arena_user_127.0.0.1",
231
+ "anony": true,
232
+ "tstamp": 1735092797.2067
233
+ },
234
+ {
235
+ "model_a": "FLUX-1",
236
+ "model_b": "ChatDiT",
237
+ "winner": "model_b",
238
+ "judge": "arena_user_127.0.0.1",
239
+ "anony": true,
240
+ "tstamp": 1735092804.6699
241
+ },
242
+ {
243
+ "model_a": "FLUX-1",
244
+ "model_b": "ChatDiT",
245
+ "winner": "model_a",
246
+ "judge": "arena_user_127.0.0.1",
247
+ "anony": true,
248
+ "tstamp": 1735092810.2635
249
+ },
250
+ {
251
+ "model_a": "FLUX-1",
252
+ "model_b": "ChatDiT",
253
+ "winner": "model_b",
254
+ "judge": "arena_user_127.0.0.1",
255
+ "anony": true,
256
+ "tstamp": 1735093113.5724
257
+ },
258
+ {
259
+ "model_a": "ChatDiT",
260
+ "model_b": "FLUX-1",
261
+ "winner": "tie (bothbad)",
262
+ "judge": "arena_user_127.0.0.1",
263
+ "anony": true,
264
+ "tstamp": 1735093133.2436
265
+ },
266
+ {
267
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
268
+ "model_b": "GPT-4o + OmniGen",
269
+ "winner": "model_a",
270
+ "judge": "arena_user_127.0.0.1",
271
+ "anony": true,
272
+ "tstamp": 1735187628.4881
273
+ },
274
+ {
275
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
276
+ "model_b": "GPT-4o + PixArt-Sigma",
277
+ "winner": "model_b",
278
+ "judge": "arena_user_127.0.0.1",
279
+ "anony": true,
280
+ "tstamp": 1735187649.4872
281
+ },
282
+ {
283
+ "model_a": "GPT-4o + Emu2",
284
+ "model_b": "ChatDiT",
285
+ "winner": "model_a",
286
+ "judge": "arena_user_127.0.0.1",
287
+ "anony": true,
288
+ "tstamp": 1735197562.2637
289
+ },
290
+ {
291
+ "model_a": "GPT-4o + FLUX.1 [dev]",
292
+ "model_b": "GPT-4o + PixArt-Sigma",
293
+ "winner": "model_a",
294
+ "judge": "arena_user_127.0.0.1",
295
+ "anony": true,
296
+ "tstamp": 1735197586.8438
297
+ },
298
+ {
299
+ "model_a": "ChatDiT",
300
+ "model_b": "GPT-4o + FLUX.1 [dev]",
301
+ "winner": "model_a",
302
+ "judge": "arena_user_127.0.0.1",
303
+ "anony": false,
304
+ "tstamp": 1735201758.7145
305
+ },
306
+ {
307
+ "model_a": "GPT-4o + DALLE-3",
308
+ "model_b": "GPT-4o + PixArt-Sigma",
309
+ "winner": "model_b",
310
+ "judge": "arena_user_127.0.0.1",
311
+ "anony": false,
312
+ "tstamp": 1735202083.631
313
+ },
314
+ {
315
+ "model_a": "GPT-4o + DALLE-3",
316
+ "model_b": "GPT-4o + PixArt-Sigma",
317
+ "winner": "model_a",
318
+ "judge": "arena_user_127.0.0.1",
319
+ "anony": false,
320
+ "tstamp": 1735202099.4377
321
+ },
322
+ {
323
+ "model_a": "GPT-4o + OmniGen",
324
+ "model_b": "ChatDiT",
325
+ "winner": "model_b",
326
+ "judge": "arena_user_127.0.0.1",
327
+ "anony": true,
328
+ "tstamp": 1735202132.8592
329
+ },
330
+ {
331
+ "model_a": "GPT-4o + DALLE-3",
332
+ "model_b": "GPT-4o + PixArt-Sigma",
333
+ "winner": "model_b",
334
+ "judge": "arena_user_127.0.0.1",
335
+ "anony": false,
336
+ "tstamp": 1735202545.8694
337
+ },
338
+ {
339
+ "model_a": "GPT-4o + DALLE-3",
340
+ "model_b": "GPT-4o + PixArt-Sigma",
341
+ "winner": "model_a",
342
+ "judge": "arena_user_127.0.0.1",
343
+ "anony": false,
344
+ "tstamp": 1735202565.5723
345
+ },
346
+ {
347
+ "model_a": "GPT-4o + DALLE-3",
348
+ "model_b": "GPT-4o + PixArt-Sigma",
349
+ "winner": "tie (bothbad)",
350
+ "judge": "arena_user_127.0.0.1",
351
+ "anony": false,
352
+ "tstamp": 1735202573.0118
353
+ },
354
+ {
355
+ "model_a": "GPT-4o + DALLE-3",
356
+ "model_b": "GPT-4o + PixArt-Sigma",
357
+ "winner": "tie (bothbad)",
358
+ "judge": "arena_user_127.0.0.1",
359
+ "anony": false,
360
+ "tstamp": 1735203523.809
361
+ },
362
+ {
363
+ "model_a": "GPT-4o + OmniGen",
364
+ "model_b": "GPT-4o + DALLE-3",
365
+ "winner": "model_b",
366
+ "judge": "arena_user_127.0.0.1",
367
+ "anony": true,
368
+ "tstamp": 1735205600.7414
369
+ },
370
+ {
371
+ "model_a": "ChatDiT",
372
+ "model_b": "GPT-4o + DALLE-3",
373
+ "winner": "model_a",
374
+ "judge": "arena_user_127.0.0.1",
375
+ "anony": true,
376
+ "tstamp": 1735207454.8251
377
+ },
378
+ {
379
+ "model_a": "GPT-4o + OmniGen",
380
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
381
+ "winner": "model_b",
382
+ "judge": "arena_user_127.0.0.1",
383
+ "anony": true,
384
+ "tstamp": 1735207466.0131
385
+ },
386
+ {
387
+ "model_a": "GPT-4o + DALLE-3",
388
+ "model_b": "GPT-4o + Emu2",
389
+ "winner": "model_b",
390
+ "judge": "arena_user_127.0.0.1",
391
+ "anony": true,
392
+ "tstamp": 1735215923.1589
393
+ },
394
+ {
395
+ "model_a": "GPT-4o + PixArt-Sigma",
396
+ "model_b": "GPT-4o + DALLE-3",
397
+ "winner": "model_a",
398
+ "judge": "arena_user_127.0.0.1",
399
+ "anony": true,
400
+ "tstamp": 1735215935.7597
401
+ },
402
+ {
403
+ "model_a": "GPT-4o + OmniGen",
404
+ "model_b": "GPT-4o + PixArt-Sigma",
405
+ "winner": "tie (bothbad)",
406
+ "judge": "arena_user_127.0.0.1",
407
+ "anony": true,
408
+ "tstamp": 1735215942.7093
409
+ },
410
+ {
411
+ "model_a": "GPT-4o + PixArt-Sigma",
412
+ "model_b": "GPT-4o + OmniGen",
413
+ "winner": "model_a",
414
+ "judge": "arena_user_127.0.0.1",
415
+ "anony": true,
416
+ "tstamp": 1735215949.7965
417
+ },
418
+ {
419
+ "model_a": "GPT-4o + DALLE-3",
420
+ "model_b": "ChatDiT",
421
+ "winner": "model_b",
422
+ "judge": "arena_user_127.0.0.1",
423
+ "anony": true,
424
+ "tstamp": 1735215962.6898
425
+ },
426
+ {
427
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
428
+ "model_b": "GPT-4o + DALLE-3",
429
+ "winner": "tie (bothbad)",
430
+ "judge": "arena_user_127.0.0.1",
431
+ "anony": true,
432
+ "tstamp": 1735215968.9052
433
+ },
434
+ {
435
+ "model_a": "GPT-4o + FLUX.1 [dev]",
436
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
437
+ "winner": "tie (bothbad)",
438
+ "judge": "arena_user_127.0.0.1",
439
+ "anony": true,
440
+ "tstamp": 1735215976.5079
441
+ },
442
+ {
443
+ "model_a": "GPT-4o + Emu2",
444
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
445
+ "winner": "model_b",
446
+ "judge": "arena_user_127.0.0.1",
447
+ "anony": true,
448
+ "tstamp": 1735215982.9709
449
+ },
450
+ {
451
+ "model_a": "ChatDiT",
452
+ "model_b": "GPT-4o + PixArt-Sigma",
453
+ "winner": "model_a",
454
+ "judge": "arena_user_127.0.0.1",
455
+ "anony": true,
456
+ "tstamp": 1735215993.2305
457
+ },
458
+ {
459
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
460
+ "model_b": "GPT-4o + FLUX.1 [dev]",
461
+ "winner": "tie (bothbad)",
462
+ "judge": "arena_user_127.0.0.1",
463
+ "anony": true,
464
+ "tstamp": 1735215999.8713
465
+ },
466
+ {
467
+ "model_a": "GPT-4o + PixArt-Sigma",
468
+ "model_b": "GPT-4o + FLUX.1 [dev]",
469
+ "winner": "model_b",
470
+ "judge": "arena_user_127.0.0.1",
471
+ "anony": true,
472
+ "tstamp": 1735216012.8216
473
+ },
474
+ {
475
+ "model_a": "ChatDiT",
476
+ "model_b": "GPT-4o + PixArt-Sigma",
477
+ "winner": "model_a",
478
+ "judge": "arena_user_127.0.0.1",
479
+ "anony": true,
480
+ "tstamp": 1735216021.653
481
+ }
482
+ ]
arena_elo/results/20241226/elo_results.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:03f83c57c40a52e7cc67b47144cc16b5b21417baad81c270e84728cc223da347
3
+ size 59910
arena_elo/results/20241226/leaderboard.csv ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ key,Model,Arena Elo rating (anony),Arena Elo rating (full),license,creator,link
2
+ GPT-4o + FLUX.1 [dev],GPT-4o + FLUX.1 [dev],1175.6821194184254,1081.7818725315192,FLUX.1 [dev] Non-Commercial License,Black Forest Labs,https://huggingface.co/black-forest-labs/FLUX.1-dev
3
+ ChatDiT,ChatDiT,1162.3247570727874,1220.8633537889925,MIT License,Tongyi Lab,https://github.com/ali-vilab/ChatDiT
4
+ GPT-4o + Emu2,GPT-4o + Emu2,1134.6891079598486,1152.7284045727401,Apache License 2.0,BAAI,https://huggingface.co/BAAI/Emu2
5
+ FLUX-1,FLUX-1,1107.9942010514312,1165.8807578999263,N/A,N/A,N/A
6
+ GPT-4o + Stable Diffusion 3 Medium,GPT-4o + Stable Diffusion 3 Medium,1066.9600535574334,1033.9863968056318,Stability AI Community License,Stability AI,https://huggingface.co/stabilityai/stable-diffusion-3-medium
7
+ GPT-4o + PixArt-Sigma,GPT-4o + PixArt-Sigma,933.0714315244284,886.7730412848159,CreativeML Open RAIL++-M License,Huawei Noah's Ark Lab,https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS
8
+ GPT-4o + DALLE-3,GPT-4o + DALLE-3,804.061447868383,852.071446157934,OpenAI Terms of Use,OpenAI,https://openai.com/index/dall-e-3/
9
+ GPT-4o + OmniGen,GPT-4o + OmniGen,615.2168815472633,605.9147269584403,MIT License,BAAI,https://huggingface.co/spaces/Shitao/OmniGen
arena_elo/results/latest/clean_battle.json ADDED
@@ -0,0 +1,482 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "model_a": "FLUX-1",
4
+ "model_b": "ChatDiT",
5
+ "winner": "model_b",
6
+ "judge": "arena_user_127.0.0.1",
7
+ "anony": true,
8
+ "tstamp": 1735030427.6669
9
+ },
10
+ {
11
+ "model_a": "FLUX-1",
12
+ "model_b": "ChatDiT",
13
+ "winner": "model_a",
14
+ "judge": "arena_user_127.0.0.1",
15
+ "anony": true,
16
+ "tstamp": 1735030452.0238
17
+ },
18
+ {
19
+ "model_a": "ChatDiT",
20
+ "model_b": "FLUX-1",
21
+ "winner": "model_a",
22
+ "judge": "arena_user_127.0.0.1",
23
+ "anony": true,
24
+ "tstamp": 1735030464.2602
25
+ },
26
+ {
27
+ "model_a": "ChatDiT",
28
+ "model_b": "FLUX-1",
29
+ "winner": "model_a",
30
+ "judge": "arena_user_127.0.0.1",
31
+ "anony": true,
32
+ "tstamp": 1735030476.2328
33
+ },
34
+ {
35
+ "model_a": "FLUX-1",
36
+ "model_b": "ChatDiT",
37
+ "winner": "tie (bothbad)",
38
+ "judge": "arena_user_127.0.0.1",
39
+ "anony": true,
40
+ "tstamp": 1735030495.2955
41
+ },
42
+ {
43
+ "model_a": "ChatDiT",
44
+ "model_b": "FLUX-1",
45
+ "winner": "tie (bothbad)",
46
+ "judge": "arena_user_127.0.0.1",
47
+ "anony": true,
48
+ "tstamp": 1735030503.418
49
+ },
50
+ {
51
+ "model_a": "ChatDiT",
52
+ "model_b": "FLUX-1",
53
+ "winner": "model_a",
54
+ "judge": "arena_user_127.0.0.1",
55
+ "anony": true,
56
+ "tstamp": 1735030511.3926
57
+ },
58
+ {
59
+ "model_a": "ChatDiT",
60
+ "model_b": "FLUX-1",
61
+ "winner": "tie (bothbad)",
62
+ "judge": "arena_user_127.0.0.1",
63
+ "anony": true,
64
+ "tstamp": 1735034259.9984
65
+ },
66
+ {
67
+ "model_a": "ChatDiT",
68
+ "model_b": "FLUX-1",
69
+ "winner": "model_a",
70
+ "judge": "arena_user_127.0.0.1",
71
+ "anony": true,
72
+ "tstamp": 1735034275.6871
73
+ },
74
+ {
75
+ "model_a": "ChatDiT",
76
+ "model_b": "FLUX-1",
77
+ "winner": "model_a",
78
+ "judge": "arena_user_127.0.0.1",
79
+ "anony": true,
80
+ "tstamp": 1735034284.7354
81
+ },
82
+ {
83
+ "model_a": "FLUX-1",
84
+ "model_b": "ChatDiT",
85
+ "winner": "model_a",
86
+ "judge": "arena_user_127.0.0.1",
87
+ "anony": true,
88
+ "tstamp": 1735034293.468
89
+ },
90
+ {
91
+ "model_a": "ChatDiT",
92
+ "model_b": "FLUX-1",
93
+ "winner": "model_b",
94
+ "judge": "arena_user_127.0.0.1",
95
+ "anony": true,
96
+ "tstamp": 1735034303.2042
97
+ },
98
+ {
99
+ "model_a": "ChatDiT",
100
+ "model_b": "FLUX-1",
101
+ "winner": "model_a",
102
+ "judge": "arena_user_127.0.0.1",
103
+ "anony": true,
104
+ "tstamp": 1735034314.1941
105
+ },
106
+ {
107
+ "model_a": "FLUX-1",
108
+ "model_b": "ChatDiT",
109
+ "winner": "model_a",
110
+ "judge": "arena_user_127.0.0.1",
111
+ "anony": true,
112
+ "tstamp": 1735034326.5092
113
+ },
114
+ {
115
+ "model_a": "FLUX-1",
116
+ "model_b": "ChatDiT",
117
+ "winner": "model_b",
118
+ "judge": "arena_user_127.0.0.1",
119
+ "anony": true,
120
+ "tstamp": 1735034331.6963
121
+ },
122
+ {
123
+ "model_a": "FLUX-1",
124
+ "model_b": "ChatDiT",
125
+ "winner": "tie (bothbad)",
126
+ "judge": "arena_user_127.0.0.1",
127
+ "anony": true,
128
+ "tstamp": 1735034336.5346
129
+ },
130
+ {
131
+ "model_a": "ChatDiT",
132
+ "model_b": "FLUX-1",
133
+ "winner": "model_b",
134
+ "judge": "arena_user_127.0.0.1",
135
+ "anony": true,
136
+ "tstamp": 1735034351.9521
137
+ },
138
+ {
139
+ "model_a": "FLUX-1",
140
+ "model_b": "ChatDiT",
141
+ "winner": "model_b",
142
+ "judge": "arena_user_127.0.0.1",
143
+ "anony": true,
144
+ "tstamp": 1735034366.1775
145
+ },
146
+ {
147
+ "model_a": "FLUX-1",
148
+ "model_b": "ChatDiT",
149
+ "winner": "model_a",
150
+ "judge": "arena_user_127.0.0.1",
151
+ "anony": true,
152
+ "tstamp": 1735034380.5877
153
+ },
154
+ {
155
+ "model_a": "ChatDiT",
156
+ "model_b": "FLUX-1",
157
+ "winner": "model_b",
158
+ "judge": "arena_user_127.0.0.1",
159
+ "anony": true,
160
+ "tstamp": 1735034384.3087
161
+ },
162
+ {
163
+ "model_a": "FLUX-1",
164
+ "model_b": "ChatDiT",
165
+ "winner": "model_a",
166
+ "judge": "arena_user_127.0.0.1",
167
+ "anony": true,
168
+ "tstamp": 1735034389.1583
169
+ },
170
+ {
171
+ "model_a": "FLUX-1",
172
+ "model_b": "ChatDiT",
173
+ "winner": "model_b",
174
+ "judge": "arena_user_127.0.0.1",
175
+ "anony": true,
176
+ "tstamp": 1735034405.9359
177
+ },
178
+ {
179
+ "model_a": "FLUX-1",
180
+ "model_b": "ChatDiT",
181
+ "winner": "model_b",
182
+ "judge": "arena_user_127.0.0.1",
183
+ "anony": true,
184
+ "tstamp": 1735034412.3533
185
+ },
186
+ {
187
+ "model_a": "FLUX-1",
188
+ "model_b": "ChatDiT",
189
+ "winner": "model_a",
190
+ "judge": "arena_user_127.0.0.1",
191
+ "anony": true,
192
+ "tstamp": 1735034419.0118
193
+ },
194
+ {
195
+ "model_a": "FLUX-1",
196
+ "model_b": "ChatDiT",
197
+ "winner": "model_b",
198
+ "judge": "arena_user_127.0.0.1",
199
+ "anony": true,
200
+ "tstamp": 1735034425.6972
201
+ },
202
+ {
203
+ "model_a": "FLUX-1",
204
+ "model_b": "ChatDiT",
205
+ "winner": "model_b",
206
+ "judge": "arena_user_127.0.0.1",
207
+ "anony": true,
208
+ "tstamp": 1735034432.5891
209
+ },
210
+ {
211
+ "model_a": "ChatDiT",
212
+ "model_b": "FLUX-1",
213
+ "winner": "model_a",
214
+ "judge": "arena_user_127.0.0.1",
215
+ "anony": true,
216
+ "tstamp": 1735092762.0
217
+ },
218
+ {
219
+ "model_a": "FLUX-1",
220
+ "model_b": "ChatDiT",
221
+ "winner": "tie (bothbad)",
222
+ "judge": "arena_user_127.0.0.1",
223
+ "anony": true,
224
+ "tstamp": 1735092774.618
225
+ },
226
+ {
227
+ "model_a": "FLUX-1",
228
+ "model_b": "ChatDiT",
229
+ "winner": "model_a",
230
+ "judge": "arena_user_127.0.0.1",
231
+ "anony": true,
232
+ "tstamp": 1735092797.2067
233
+ },
234
+ {
235
+ "model_a": "FLUX-1",
236
+ "model_b": "ChatDiT",
237
+ "winner": "model_b",
238
+ "judge": "arena_user_127.0.0.1",
239
+ "anony": true,
240
+ "tstamp": 1735092804.6699
241
+ },
242
+ {
243
+ "model_a": "FLUX-1",
244
+ "model_b": "ChatDiT",
245
+ "winner": "model_a",
246
+ "judge": "arena_user_127.0.0.1",
247
+ "anony": true,
248
+ "tstamp": 1735092810.2635
249
+ },
250
+ {
251
+ "model_a": "FLUX-1",
252
+ "model_b": "ChatDiT",
253
+ "winner": "model_b",
254
+ "judge": "arena_user_127.0.0.1",
255
+ "anony": true,
256
+ "tstamp": 1735093113.5724
257
+ },
258
+ {
259
+ "model_a": "ChatDiT",
260
+ "model_b": "FLUX-1",
261
+ "winner": "tie (bothbad)",
262
+ "judge": "arena_user_127.0.0.1",
263
+ "anony": true,
264
+ "tstamp": 1735093133.2436
265
+ },
266
+ {
267
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
268
+ "model_b": "GPT-4o + OmniGen",
269
+ "winner": "model_a",
270
+ "judge": "arena_user_127.0.0.1",
271
+ "anony": true,
272
+ "tstamp": 1735187628.4881
273
+ },
274
+ {
275
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
276
+ "model_b": "GPT-4o + PixArt-Sigma",
277
+ "winner": "model_b",
278
+ "judge": "arena_user_127.0.0.1",
279
+ "anony": true,
280
+ "tstamp": 1735187649.4872
281
+ },
282
+ {
283
+ "model_a": "GPT-4o + Emu2",
284
+ "model_b": "ChatDiT",
285
+ "winner": "model_a",
286
+ "judge": "arena_user_127.0.0.1",
287
+ "anony": true,
288
+ "tstamp": 1735197562.2637
289
+ },
290
+ {
291
+ "model_a": "GPT-4o + FLUX.1 [dev]",
292
+ "model_b": "GPT-4o + PixArt-Sigma",
293
+ "winner": "model_a",
294
+ "judge": "arena_user_127.0.0.1",
295
+ "anony": true,
296
+ "tstamp": 1735197586.8438
297
+ },
298
+ {
299
+ "model_a": "ChatDiT",
300
+ "model_b": "GPT-4o + FLUX.1 [dev]",
301
+ "winner": "model_a",
302
+ "judge": "arena_user_127.0.0.1",
303
+ "anony": false,
304
+ "tstamp": 1735201758.7145
305
+ },
306
+ {
307
+ "model_a": "GPT-4o + DALLE-3",
308
+ "model_b": "GPT-4o + PixArt-Sigma",
309
+ "winner": "model_b",
310
+ "judge": "arena_user_127.0.0.1",
311
+ "anony": false,
312
+ "tstamp": 1735202083.631
313
+ },
314
+ {
315
+ "model_a": "GPT-4o + DALLE-3",
316
+ "model_b": "GPT-4o + PixArt-Sigma",
317
+ "winner": "model_a",
318
+ "judge": "arena_user_127.0.0.1",
319
+ "anony": false,
320
+ "tstamp": 1735202099.4377
321
+ },
322
+ {
323
+ "model_a": "GPT-4o + OmniGen",
324
+ "model_b": "ChatDiT",
325
+ "winner": "model_b",
326
+ "judge": "arena_user_127.0.0.1",
327
+ "anony": true,
328
+ "tstamp": 1735202132.8592
329
+ },
330
+ {
331
+ "model_a": "GPT-4o + DALLE-3",
332
+ "model_b": "GPT-4o + PixArt-Sigma",
333
+ "winner": "model_b",
334
+ "judge": "arena_user_127.0.0.1",
335
+ "anony": false,
336
+ "tstamp": 1735202545.8694
337
+ },
338
+ {
339
+ "model_a": "GPT-4o + DALLE-3",
340
+ "model_b": "GPT-4o + PixArt-Sigma",
341
+ "winner": "model_a",
342
+ "judge": "arena_user_127.0.0.1",
343
+ "anony": false,
344
+ "tstamp": 1735202565.5723
345
+ },
346
+ {
347
+ "model_a": "GPT-4o + DALLE-3",
348
+ "model_b": "GPT-4o + PixArt-Sigma",
349
+ "winner": "tie (bothbad)",
350
+ "judge": "arena_user_127.0.0.1",
351
+ "anony": false,
352
+ "tstamp": 1735202573.0118
353
+ },
354
+ {
355
+ "model_a": "GPT-4o + DALLE-3",
356
+ "model_b": "GPT-4o + PixArt-Sigma",
357
+ "winner": "tie (bothbad)",
358
+ "judge": "arena_user_127.0.0.1",
359
+ "anony": false,
360
+ "tstamp": 1735203523.809
361
+ },
362
+ {
363
+ "model_a": "GPT-4o + OmniGen",
364
+ "model_b": "GPT-4o + DALLE-3",
365
+ "winner": "model_b",
366
+ "judge": "arena_user_127.0.0.1",
367
+ "anony": true,
368
+ "tstamp": 1735205600.7414
369
+ },
370
+ {
371
+ "model_a": "ChatDiT",
372
+ "model_b": "GPT-4o + DALLE-3",
373
+ "winner": "model_a",
374
+ "judge": "arena_user_127.0.0.1",
375
+ "anony": true,
376
+ "tstamp": 1735207454.8251
377
+ },
378
+ {
379
+ "model_a": "GPT-4o + OmniGen",
380
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
381
+ "winner": "model_b",
382
+ "judge": "arena_user_127.0.0.1",
383
+ "anony": true,
384
+ "tstamp": 1735207466.0131
385
+ },
386
+ {
387
+ "model_a": "GPT-4o + DALLE-3",
388
+ "model_b": "GPT-4o + Emu2",
389
+ "winner": "model_b",
390
+ "judge": "arena_user_127.0.0.1",
391
+ "anony": true,
392
+ "tstamp": 1735215923.1589
393
+ },
394
+ {
395
+ "model_a": "GPT-4o + PixArt-Sigma",
396
+ "model_b": "GPT-4o + DALLE-3",
397
+ "winner": "model_a",
398
+ "judge": "arena_user_127.0.0.1",
399
+ "anony": true,
400
+ "tstamp": 1735215935.7597
401
+ },
402
+ {
403
+ "model_a": "GPT-4o + OmniGen",
404
+ "model_b": "GPT-4o + PixArt-Sigma",
405
+ "winner": "tie (bothbad)",
406
+ "judge": "arena_user_127.0.0.1",
407
+ "anony": true,
408
+ "tstamp": 1735215942.7093
409
+ },
410
+ {
411
+ "model_a": "GPT-4o + PixArt-Sigma",
412
+ "model_b": "GPT-4o + OmniGen",
413
+ "winner": "model_a",
414
+ "judge": "arena_user_127.0.0.1",
415
+ "anony": true,
416
+ "tstamp": 1735215949.7965
417
+ },
418
+ {
419
+ "model_a": "GPT-4o + DALLE-3",
420
+ "model_b": "ChatDiT",
421
+ "winner": "model_b",
422
+ "judge": "arena_user_127.0.0.1",
423
+ "anony": true,
424
+ "tstamp": 1735215962.6898
425
+ },
426
+ {
427
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
428
+ "model_b": "GPT-4o + DALLE-3",
429
+ "winner": "tie (bothbad)",
430
+ "judge": "arena_user_127.0.0.1",
431
+ "anony": true,
432
+ "tstamp": 1735215968.9052
433
+ },
434
+ {
435
+ "model_a": "GPT-4o + FLUX.1 [dev]",
436
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
437
+ "winner": "tie (bothbad)",
438
+ "judge": "arena_user_127.0.0.1",
439
+ "anony": true,
440
+ "tstamp": 1735215976.5079
441
+ },
442
+ {
443
+ "model_a": "GPT-4o + Emu2",
444
+ "model_b": "GPT-4o + Stable Diffusion 3 Medium",
445
+ "winner": "model_b",
446
+ "judge": "arena_user_127.0.0.1",
447
+ "anony": true,
448
+ "tstamp": 1735215982.9709
449
+ },
450
+ {
451
+ "model_a": "ChatDiT",
452
+ "model_b": "GPT-4o + PixArt-Sigma",
453
+ "winner": "model_a",
454
+ "judge": "arena_user_127.0.0.1",
455
+ "anony": true,
456
+ "tstamp": 1735215993.2305
457
+ },
458
+ {
459
+ "model_a": "GPT-4o + Stable Diffusion 3 Medium",
460
+ "model_b": "GPT-4o + FLUX.1 [dev]",
461
+ "winner": "tie (bothbad)",
462
+ "judge": "arena_user_127.0.0.1",
463
+ "anony": true,
464
+ "tstamp": 1735215999.8713
465
+ },
466
+ {
467
+ "model_a": "GPT-4o + PixArt-Sigma",
468
+ "model_b": "GPT-4o + FLUX.1 [dev]",
469
+ "winner": "model_b",
470
+ "judge": "arena_user_127.0.0.1",
471
+ "anony": true,
472
+ "tstamp": 1735216012.8216
473
+ },
474
+ {
475
+ "model_a": "ChatDiT",
476
+ "model_b": "GPT-4o + PixArt-Sigma",
477
+ "winner": "model_a",
478
+ "judge": "arena_user_127.0.0.1",
479
+ "anony": true,
480
+ "tstamp": 1735216021.653
481
+ }
482
+ ]
arena_elo/results/latest/elo_results.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:03f83c57c40a52e7cc67b47144cc16b5b21417baad81c270e84728cc223da347
3
+ size 59910
arena_elo/results/latest/leaderboard.csv ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ key,Model,Arena Elo rating (anony),Arena Elo rating (full),license,creator,link
2
+ GPT-4o + FLUX.1 [dev],GPT-4o + FLUX.1 [dev],1175.6821194184254,1081.7818725315192,FLUX.1 [dev] Non-Commercial License,Black Forest Labs,https://huggingface.co/black-forest-labs/FLUX.1-dev
3
+ ChatDiT,ChatDiT,1162.3247570727874,1220.8633537889925,MIT License,Tongyi Lab,https://github.com/ali-vilab/ChatDiT
4
+ GPT-4o + Emu2,GPT-4o + Emu2,1134.6891079598486,1152.7284045727401,Apache License 2.0,BAAI,https://huggingface.co/BAAI/Emu2
5
+ FLUX-1,FLUX-1,1107.9942010514312,1165.8807578999263,N/A,N/A,N/A
6
+ GPT-4o + Stable Diffusion 3 Medium,GPT-4o + Stable Diffusion 3 Medium,1066.9600535574334,1033.9863968056318,Stability AI Community License,Stability AI,https://huggingface.co/stabilityai/stable-diffusion-3-medium
7
+ GPT-4o + PixArt-Sigma,GPT-4o + PixArt-Sigma,933.0714315244284,886.7730412848159,CreativeML Open RAIL++-M License,Huawei Noah's Ark Lab,https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS
8
+ GPT-4o + DALLE-3,GPT-4o + DALLE-3,804.061447868383,852.071446157934,OpenAI Terms of Use,OpenAI,https://openai.com/index/dall-e-3/
9
+ GPT-4o + OmniGen,GPT-4o + OmniGen,615.2168815472633,605.9147269584403,MIT License,BAAI,https://huggingface.co/spaces/Shitao/OmniGen
arena_elo/simple_test.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ with open("./results/latest/elo_results.pkl",'rb') as f:
3
+ data = pickle.load(f)
4
+ print()
5
+ df = data["anony"]["leaderboard_table_df"]
6
+ # sort by rating
7
+ df = df.sort_values(by=["rating"], ascending=False)
8
+ print(df)
9
+
10
+ print()
11
+
12
+ df = data["full"]["leaderboard_table_df"]
13
+ # sort by rating
14
+ df = df.sort_values(by=["rating"], ascending=False)
15
+ print(df)
16
+ print('done')
arena_elo/update_elo_rating.sh ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # set LOGDIR to default if not set before
2
+ if [ -z "$LOGDIR" ]; then
3
+ echo "LOGDIR is not set. Using default '../logs/vote_log'"
4
+ export LOGDIR="../logs/vote_log"
5
+ fi
6
+
7
+ mkdir -p results
8
+
9
+ # for battle data
10
+ python -m elo_rating.clean_battle_data
11
+ battle_cutoff_date=`cat cut_off_date.txt` && rm cut_off_date.txt && echo "Battle data last updated on $battle_cutoff_date"
12
+
13
+ mkdir -p ./results/$battle_cutoff_date
14
+
15
+ cp clean_battle_$battle_cutoff_date.json ./results/latest/clean_battle.json
16
+ mv clean_battle_$battle_cutoff_date.json ./results/$battle_cutoff_date/clean_battle.json
17
+
18
+ python -m elo_rating.elo_analysis --clean-battle-file ./results/$battle_cutoff_date/clean_battle.json
19
+ mv ./elo_results_$battle_cutoff_date.pkl ./results/$battle_cutoff_date/elo_results.pkl
20
+
21
+ # generate the leaderboard
22
+
23
+ python -m elo_rating.generate_leaderboard \
24
+ --model_info_file "../model/model_registry.csv" \
25
+ --elo_rating_pkl "./results/$battle_cutoff_date/elo_results.pkl" \
26
+ --output_csv "./results/$battle_cutoff_date/leaderboard.csv"
27
+
28
+ mkdir -p ./results/latest
29
+ cp ./results/$battle_cutoff_date/leaderboard.csv ./results/latest/leaderboard.csv
30
+ cp ./results/$battle_cutoff_date/elo_results.pkl ./results/latest/elo_results.pkl
arena_elo/win_fraction_heatmap.png ADDED
logs/vote_log/2024-12-24-conv.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"tstamp": 1735030427.6669, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
2
+ {"tstamp": 1735030452.0238, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
3
+ {"tstamp": 1735030464.2602, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
4
+ {"tstamp": 1735030476.2328, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
5
+ {"tstamp": 1735030495.2955, "type": "bothbad_vote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
6
+ {"tstamp": 1735030503.418, "type": "bothbad_vote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
7
+ {"tstamp": 1735030511.3926, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
8
+ {"tstamp": 1735034259.9984, "type": "bothbad_vote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
9
+ {"tstamp": 1735034275.6871, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
10
+ {"tstamp": 1735034284.7354, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
11
+ {"tstamp": 1735034293.468, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
12
+ {"tstamp": 1735034303.2042, "type": "rightvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
13
+ {"tstamp": 1735034314.1941, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
14
+ {"tstamp": 1735034326.5092, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
15
+ {"tstamp": 1735034331.6963, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
16
+ {"tstamp": 1735034336.5346, "type": "bothbad_vote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
17
+ {"tstamp": 1735034351.9521, "type": "rightvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
18
+ {"tstamp": 1735034366.1775, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
19
+ {"tstamp": 1735034380.5877, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
20
+ {"tstamp": 1735034384.3087, "type": "rightvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
21
+ {"tstamp": 1735034389.1583, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
22
+ {"tstamp": 1735034405.9359, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
23
+ {"tstamp": 1735034412.3533, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
24
+ {"tstamp": 1735034419.0118, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
25
+ {"tstamp": 1735034425.6972, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
26
+ {"tstamp": 1735034432.5891, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
logs/vote_log/2024-12-25-conv.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {"tstamp": 1735092762.0, "type": "leftvote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
2
+ {"tstamp": 1735092774.618, "type": "bothbad_vote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
3
+ {"tstamp": 1735092797.2067, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
4
+ {"tstamp": 1735092804.6699, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
5
+ {"tstamp": 1735092810.2635, "type": "leftvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
6
+ {"tstamp": 1735093113.5724, "type": "rightvote", "models": ["FLUX-1", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
7
+ {"tstamp": 1735093133.2436, "type": "bothbad_vote", "models": ["ChatDiT", "FLUX-1"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
logs/vote_log/2024-12-26-conv.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"tstamp": 1735187628.4881, "type": "leftvote", "models": ["GPT-4o + Stable Diffusion 3 Medium", "GPT-4o + OmniGen"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
2
+ {"tstamp": 1735187649.4872, "type": "rightvote", "models": ["GPT-4o + Stable Diffusion 3 Medium", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
3
+ {"tstamp": 1735197562.2637, "type": "leftvote", "models": ["GPT-4o + Emu2", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
4
+ {"tstamp": 1735197586.8438, "type": "leftvote", "models": ["GPT-4o + FLUX.1 [dev]", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
5
+ {"tstamp": 1735201758.7145, "type": "leftvote", "models": ["ChatDiT", "GPT-4o + FLUX.1 [dev]"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
6
+ {"tstamp": 1735202083.631, "type": "rightvote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
7
+ {"tstamp": 1735202099.4377, "type": "leftvote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
8
+ {"tstamp": 1735202132.8592, "type": "rightvote", "models": ["GPT-4o + OmniGen", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
9
+ {"tstamp": 1735202545.8694, "type": "rightvote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
10
+ {"tstamp": 1735202565.5723, "type": "leftvote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
11
+ {"tstamp": 1735202573.0118, "type": "bothbad_vote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
12
+ {"tstamp": 1735203523.809, "type": "bothbad_vote", "models": ["GPT-4o + DALLE-3", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": false, "ip": "127.0.0.1"}
13
+ {"tstamp": 1735205600.7414, "type": "rightvote", "models": ["GPT-4o + OmniGen", "GPT-4o + DALLE-3"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
14
+ {"tstamp": 1735207454.8251, "type": "leftvote", "models": ["ChatDiT", "GPT-4o + DALLE-3"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
15
+ {"tstamp": 1735207466.0131, "type": "rightvote", "models": ["GPT-4o + OmniGen", "GPT-4o + Stable Diffusion 3 Medium"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
16
+ {"tstamp": 1735215923.1589, "type": "rightvote", "models": ["GPT-4o + DALLE-3", "GPT-4o + Emu2"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
17
+ {"tstamp": 1735215935.7597, "type": "leftvote", "models": ["GPT-4o + PixArt-Sigma", "GPT-4o + DALLE-3"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
18
+ {"tstamp": 1735215942.7093, "type": "bothbad_vote", "models": ["GPT-4o + OmniGen", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
19
+ {"tstamp": 1735215949.7965, "type": "leftvote", "models": ["GPT-4o + PixArt-Sigma", "GPT-4o + OmniGen"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
20
+ {"tstamp": 1735215962.6898, "type": "rightvote", "models": ["GPT-4o + DALLE-3", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
21
+ {"tstamp": 1735215968.9052, "type": "bothbad_vote", "models": ["GPT-4o + Stable Diffusion 3 Medium", "GPT-4o + DALLE-3"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
22
+ {"tstamp": 1735215976.5079, "type": "bothbad_vote", "models": ["GPT-4o + FLUX.1 [dev]", "GPT-4o + Stable Diffusion 3 Medium"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
23
+ {"tstamp": 1735215982.9709, "type": "rightvote", "models": ["GPT-4o + Emu2", "GPT-4o + Stable Diffusion 3 Medium"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
24
+ {"tstamp": 1735215993.2305, "type": "leftvote", "models": ["ChatDiT", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
25
+ {"tstamp": 1735215999.8713, "type": "bothbad_vote", "models": ["GPT-4o + Stable Diffusion 3 Medium", "GPT-4o + FLUX.1 [dev]"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
26
+ {"tstamp": 1735216012.8216, "type": "rightvote", "models": ["GPT-4o + PixArt-Sigma", "GPT-4o + FLUX.1 [dev]"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
27
+ {"tstamp": 1735216021.653, "type": "leftvote", "models": ["ChatDiT", "GPT-4o + PixArt-Sigma"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
logs/vote_log/2024-12-27-conv.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {"tstamp": 1735286354.5764, "type": "rightvote", "models": ["GPT-4o + PixArt-Sigma", "GPT-4o + OmniGen"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
2
+ {"tstamp": 1735286365.2329, "type": "bothbad_vote", "models": ["GPT-4o + Stable Diffusion 3 Medium", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
3
+ {"tstamp": 1735286374.6751, "type": "leftvote", "models": ["GPT-4o + Emu2", "ChatDiT"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
4
+ {"tstamp": 1735286382.1211, "type": "leftvote", "models": ["GPT-4o + FLUX.1 [dev]", "GPT-4o + Emu2"], "states": [{}, {}], "anony": true, "ip": "127.0.0.1"}
logs/vote_log/gr_web_image_generation_multi.log ADDED
@@ -0,0 +1,450 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-12-26 11:49:30 | INFO | stdout | An error occurred while loading the CSV file: 'utf-8' codec can't decode byte 0xa1 in position 1240: invalid start byte
2
+ 2024-12-26 11:49:31 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
3
+ 2024-12-26 11:49:31 | INFO | stdout |
4
+ 2024-12-26 11:49:31 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
5
+ 2024-12-26 11:49:38 | INFO | stdout | Keyboard interruption in main thread... closing server.
6
+ 2024-12-26 11:49:38 | INFO | stdout | Rerunning server... use `close()` to stop if you need to change `launch()` parameters.
7
+ 2024-12-26 11:49:38 | INFO | stdout | ----
8
+ 2024-12-26 11:49:38 | ERROR | stderr | Traceback (most recent call last):
9
+ 2024-12-26 11:49:38 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 65, in <module>
10
+ 2024-12-26 11:49:38 | ERROR | stderr | demo.launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
11
+ 2024-12-26 11:49:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2619, in launch
12
+ 2024-12-26 11:49:38 | ERROR | stderr | raise ValueError(
13
+ 2024-12-26 11:49:38 | ERROR | stderr | ValueError: When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost.
14
+ 2024-12-26 11:49:42 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
15
+ 2024-12-26 11:49:42 | ERROR | stderr | Traceback (most recent call last):
16
+ 2024-12-26 11:49:42 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
17
+ 2024-12-26 11:49:42 | ERROR | stderr | lock.acquire()
18
+ 2024-12-26 11:49:42 | ERROR | stderr | KeyboardInterrupt:
19
+ 2024-12-26 12:07:12 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
20
+ 2024-12-26 12:07:12 | INFO | stdout |
21
+ 2024-12-26 12:07:12 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
22
+ 2024-12-26 12:33:48 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
23
+ 2024-12-26 12:34:09 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
24
+ 2024-12-26 12:43:31 | INFO | stdout | Keyboard interruption in main thread... closing server.
25
+ 2024-12-26 12:43:31 | INFO | stdout | Rerunning server... use `close()` to stop if you need to change `launch()` parameters.
26
+ 2024-12-26 12:43:31 | INFO | stdout | ----
27
+ 2024-12-26 12:43:31 | ERROR | stderr | Traceback (most recent call last):
28
+ 2024-12-26 12:43:31 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 65, in <module>
29
+ 2024-12-26 12:43:31 | ERROR | stderr | demo.launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
30
+ 2024-12-26 12:43:31 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2619, in launch
31
+ 2024-12-26 12:43:31 | ERROR | stderr | raise ValueError(
32
+ 2024-12-26 12:43:31 | ERROR | stderr | ValueError: When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost.
33
+ 2024-12-26 14:01:35 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
34
+ 2024-12-26 14:01:36 | INFO | stdout |
35
+ 2024-12-26 14:01:36 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
36
+ 2024-12-26 14:05:04 | INFO | stdout | Keyboard interruption in main thread... closing server.
37
+ 2024-12-26 14:05:04 | ERROR | stderr | Traceback (most recent call last):
38
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
39
+ 2024-12-26 14:05:04 | ERROR | stderr | time.sleep(0.1)
40
+ 2024-12-26 14:05:04 | ERROR | stderr | KeyboardInterrupt
41
+ 2024-12-26 14:05:04 | ERROR | stderr |
42
+ 2024-12-26 14:05:04 | ERROR | stderr | During handling of the above exception, another exception occurred:
43
+ 2024-12-26 14:05:04 | ERROR | stderr |
44
+ 2024-12-26 14:05:04 | ERROR | stderr | Traceback (most recent call last):
45
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
46
+ 2024-12-26 14:05:04 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
47
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
48
+ 2024-12-26 14:05:04 | ERROR | stderr | self.block_thread()
49
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
50
+ 2024-12-26 14:05:04 | ERROR | stderr | self.server.close()
51
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
52
+ 2024-12-26 14:05:04 | ERROR | stderr | self.thread.join(timeout=5)
53
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
54
+ 2024-12-26 14:05:04 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
55
+ 2024-12-26 14:05:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
56
+ 2024-12-26 14:05:04 | ERROR | stderr | if lock.acquire(block, timeout):
57
+ 2024-12-26 14:05:04 | ERROR | stderr | KeyboardInterrupt
58
+ 2024-12-26 14:17:14 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
59
+ 2024-12-26 14:17:14 | INFO | stdout |
60
+ 2024-12-26 14:17:14 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
61
+ 2024-12-26 14:21:13 | INFO | stdout | Keyboard interruption in main thread... closing server.
62
+ 2024-12-26 14:21:13 | ERROR | stderr | Traceback (most recent call last):
63
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
64
+ 2024-12-26 14:21:13 | ERROR | stderr | time.sleep(0.1)
65
+ 2024-12-26 14:21:13 | ERROR | stderr | KeyboardInterrupt
66
+ 2024-12-26 14:21:13 | ERROR | stderr |
67
+ 2024-12-26 14:21:13 | ERROR | stderr | During handling of the above exception, another exception occurred:
68
+ 2024-12-26 14:21:13 | ERROR | stderr |
69
+ 2024-12-26 14:21:13 | ERROR | stderr | Traceback (most recent call last):
70
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
71
+ 2024-12-26 14:21:13 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
72
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
73
+ 2024-12-26 14:21:13 | ERROR | stderr | self.block_thread()
74
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
75
+ 2024-12-26 14:21:13 | ERROR | stderr | self.server.close()
76
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
77
+ 2024-12-26 14:21:13 | ERROR | stderr | self.thread.join(timeout=5)
78
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
79
+ 2024-12-26 14:21:13 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
80
+ 2024-12-26 14:21:13 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
81
+ 2024-12-26 14:21:13 | ERROR | stderr | if lock.acquire(block, timeout):
82
+ 2024-12-26 14:21:13 | ERROR | stderr | KeyboardInterrupt
83
+ 2024-12-26 15:14:17 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
84
+ 2024-12-26 15:14:17 | INFO | stdout |
85
+ 2024-12-26 15:14:17 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
86
+ 2024-12-26 15:19:22 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
87
+ 2024-12-26 15:19:46 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
88
+ 2024-12-26 15:20:35 | ERROR | stderr | Traceback (most recent call last):
89
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/queueing.py", line 625, in process_events
90
+ 2024-12-26 15:20:35 | ERROR | stderr | response = await route_utils.call_process_api(
91
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/route_utils.py", line 322, in call_process_api
92
+ 2024-12-26 15:20:35 | ERROR | stderr | output = await app.get_blocks().process_api(
93
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2047, in process_api
94
+ 2024-12-26 15:20:35 | ERROR | stderr | result = await self.call_function(
95
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 1594, in call_function
96
+ 2024-12-26 15:20:35 | ERROR | stderr | prediction = await anyio.to_thread.run_sync( # type: ignore
97
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
98
+ 2024-12-26 15:20:35 | ERROR | stderr | return await get_async_backend().run_sync_in_worker_thread(
99
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2505, in run_sync_in_worker_thread
100
+ 2024-12-26 15:20:35 | ERROR | stderr | return await future
101
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 1005, in run
102
+ 2024-12-26 15:20:35 | ERROR | stderr | result = context.run(func, *args)
103
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/utils.py", line 869, in wrapper
104
+ 2024-12-26 15:20:35 | ERROR | stderr | response = f(*args, **kwargs)
105
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/utils.py", line 164, in refresh_side_by_side
106
+ 2024-12-26 15:20:35 | ERROR | stderr | state0, state1, prompt, input_images, output_images_A, output_images_B = models.get_result_of_random_case(model_name_A, model_name_B)
107
+ 2024-12-26 15:20:35 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/model/model_manager.py", line 215, in get_result_of_random_case
108
+ 2024-12-26 15:20:35 | ERROR | stderr | output_images_A = model_A.get_result(case_name)
109
+ 2024-12-26 15:20:35 | ERROR | stderr | AttributeError: 'NoneType' object has no attribute 'get_result'
110
+ 2024-12-26 16:22:06 | INFO | stdout | Keyboard interruption in main thread... closing server.
111
+ 2024-12-26 16:22:07 | ERROR | stderr | Traceback (most recent call last):
112
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
113
+ 2024-12-26 16:22:07 | ERROR | stderr | time.sleep(0.1)
114
+ 2024-12-26 16:22:07 | ERROR | stderr | KeyboardInterrupt
115
+ 2024-12-26 16:22:07 | ERROR | stderr |
116
+ 2024-12-26 16:22:07 | ERROR | stderr | During handling of the above exception, another exception occurred:
117
+ 2024-12-26 16:22:07 | ERROR | stderr |
118
+ 2024-12-26 16:22:07 | ERROR | stderr | Traceback (most recent call last):
119
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
120
+ 2024-12-26 16:22:07 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
121
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
122
+ 2024-12-26 16:22:07 | ERROR | stderr | self.block_thread()
123
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
124
+ 2024-12-26 16:22:07 | ERROR | stderr | self.server.close()
125
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
126
+ 2024-12-26 16:22:07 | ERROR | stderr | self.thread.join(timeout=5)
127
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
128
+ 2024-12-26 16:22:07 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
129
+ 2024-12-26 16:22:07 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
130
+ 2024-12-26 16:22:07 | ERROR | stderr | if lock.acquire(block, timeout):
131
+ 2024-12-26 16:22:07 | ERROR | stderr | KeyboardInterrupt
132
+ 2024-12-26 16:28:41 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
133
+ 2024-12-26 16:28:41 | INFO | stdout |
134
+ 2024-12-26 16:28:41 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
135
+ 2024-12-26 16:29:18 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
136
+ 2024-12-26 16:33:49 | INFO | stdout | Keyboard interruption in main thread... closing server.
137
+ 2024-12-26 16:33:49 | ERROR | stderr | Traceback (most recent call last):
138
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
139
+ 2024-12-26 16:33:49 | ERROR | stderr | time.sleep(0.1)
140
+ 2024-12-26 16:33:49 | ERROR | stderr | KeyboardInterrupt
141
+ 2024-12-26 16:33:49 | ERROR | stderr |
142
+ 2024-12-26 16:33:49 | ERROR | stderr | During handling of the above exception, another exception occurred:
143
+ 2024-12-26 16:33:49 | ERROR | stderr |
144
+ 2024-12-26 16:33:49 | ERROR | stderr | Traceback (most recent call last):
145
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
146
+ 2024-12-26 16:33:49 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
147
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
148
+ 2024-12-26 16:33:49 | ERROR | stderr | self.block_thread()
149
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
150
+ 2024-12-26 16:33:49 | ERROR | stderr | self.server.close()
151
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
152
+ 2024-12-26 16:33:49 | ERROR | stderr | self.thread.join(timeout=5)
153
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
154
+ 2024-12-26 16:33:49 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
155
+ 2024-12-26 16:33:49 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
156
+ 2024-12-26 16:33:49 | ERROR | stderr | if lock.acquire(block, timeout):
157
+ 2024-12-26 16:33:49 | ERROR | stderr | KeyboardInterrupt
158
+ 2024-12-26 16:34:06 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
159
+ 2024-12-26 16:34:06 | INFO | stdout |
160
+ 2024-12-26 16:34:06 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
161
+ 2024-12-26 16:34:43 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
162
+ 2024-12-26 16:34:59 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
163
+ 2024-12-26 16:35:32 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
164
+ 2024-12-26 16:42:25 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
165
+ 2024-12-26 16:42:45 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
166
+ 2024-12-26 16:42:53 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
167
+ 2024-12-26 16:58:43 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
168
+ 2024-12-26 17:33:20 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
169
+ 2024-12-26 18:04:14 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
170
+ 2024-12-26 18:04:26 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
171
+ 2024-12-26 19:58:21 | INFO | stdout | Keyboard interruption in main thread... closing server.
172
+ 2024-12-26 19:58:21 | ERROR | stderr | Traceback (most recent call last):
173
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
174
+ 2024-12-26 19:58:21 | ERROR | stderr | time.sleep(0.1)
175
+ 2024-12-26 19:58:21 | ERROR | stderr | KeyboardInterrupt
176
+ 2024-12-26 19:58:21 | ERROR | stderr |
177
+ 2024-12-26 19:58:21 | ERROR | stderr | During handling of the above exception, another exception occurred:
178
+ 2024-12-26 19:58:21 | ERROR | stderr |
179
+ 2024-12-26 19:58:21 | ERROR | stderr | Traceback (most recent call last):
180
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
181
+ 2024-12-26 19:58:21 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
182
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
183
+ 2024-12-26 19:58:21 | ERROR | stderr | self.block_thread()
184
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
185
+ 2024-12-26 19:58:21 | ERROR | stderr | self.server.close()
186
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
187
+ 2024-12-26 19:58:21 | ERROR | stderr | self.thread.join(timeout=5)
188
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
189
+ 2024-12-26 19:58:21 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
190
+ 2024-12-26 19:58:21 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
191
+ 2024-12-26 19:58:21 | ERROR | stderr | if lock.acquire(block, timeout):
192
+ 2024-12-26 19:58:21 | ERROR | stderr | KeyboardInterrupt
193
+ 2024-12-26 20:07:22 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
194
+ 2024-12-26 20:07:22 | INFO | stdout |
195
+ 2024-12-26 20:07:22 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
196
+ 2024-12-26 20:09:29 | INFO | stdout | Keyboard interruption in main thread... closing server.
197
+ 2024-12-26 20:09:29 | ERROR | stderr | Traceback (most recent call last):
198
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
199
+ 2024-12-26 20:09:29 | ERROR | stderr | time.sleep(0.1)
200
+ 2024-12-26 20:09:29 | ERROR | stderr | KeyboardInterrupt
201
+ 2024-12-26 20:09:29 | ERROR | stderr |
202
+ 2024-12-26 20:09:29 | ERROR | stderr | During handling of the above exception, another exception occurred:
203
+ 2024-12-26 20:09:29 | ERROR | stderr |
204
+ 2024-12-26 20:09:29 | ERROR | stderr | Traceback (most recent call last):
205
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
206
+ 2024-12-26 20:09:29 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
207
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
208
+ 2024-12-26 20:09:29 | ERROR | stderr | self.block_thread()
209
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
210
+ 2024-12-26 20:09:29 | ERROR | stderr | self.server.close()
211
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
212
+ 2024-12-26 20:09:29 | ERROR | stderr | self.thread.join(timeout=5)
213
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
214
+ 2024-12-26 20:09:29 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
215
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
216
+ 2024-12-26 20:09:29 | ERROR | stderr | if lock.acquire(block, timeout):
217
+ 2024-12-26 20:09:29 | ERROR | stderr | KeyboardInterrupt
218
+ 2024-12-26 20:09:29 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
219
+ 2024-12-26 20:09:29 | ERROR | stderr | Traceback (most recent call last):
220
+ 2024-12-26 20:09:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
221
+ 2024-12-26 20:09:29 | ERROR | stderr | lock.acquire()
222
+ 2024-12-26 20:09:29 | ERROR | stderr | KeyboardInterrupt:
223
+ 2024-12-26 20:09:34 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
224
+ 2024-12-26 20:09:34 | INFO | stdout |
225
+ 2024-12-26 20:09:34 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
226
+ 2024-12-26 20:10:51 | INFO | stdout | Keyboard interruption in main thread... closing server.
227
+ 2024-12-26 20:10:51 | ERROR | stderr | Traceback (most recent call last):
228
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
229
+ 2024-12-26 20:10:51 | ERROR | stderr | time.sleep(0.1)
230
+ 2024-12-26 20:10:51 | ERROR | stderr | KeyboardInterrupt
231
+ 2024-12-26 20:10:51 | ERROR | stderr |
232
+ 2024-12-26 20:10:51 | ERROR | stderr | During handling of the above exception, another exception occurred:
233
+ 2024-12-26 20:10:51 | ERROR | stderr |
234
+ 2024-12-26 20:10:51 | ERROR | stderr | Traceback (most recent call last):
235
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
236
+ 2024-12-26 20:10:51 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
237
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
238
+ 2024-12-26 20:10:51 | ERROR | stderr | self.block_thread()
239
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
240
+ 2024-12-26 20:10:51 | ERROR | stderr | self.server.close()
241
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
242
+ 2024-12-26 20:10:51 | ERROR | stderr | self.thread.join(timeout=5)
243
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
244
+ 2024-12-26 20:10:51 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
245
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
246
+ 2024-12-26 20:10:51 | ERROR | stderr | if lock.acquire(block, timeout):
247
+ 2024-12-26 20:10:51 | ERROR | stderr | KeyboardInterrupt
248
+ 2024-12-26 20:10:51 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
249
+ 2024-12-26 20:10:51 | ERROR | stderr | Traceback (most recent call last):
250
+ 2024-12-26 20:10:51 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
251
+ 2024-12-26 20:10:51 | ERROR | stderr | lock.acquire()
252
+ 2024-12-26 20:10:51 | ERROR | stderr | KeyboardInterrupt:
253
+ 2024-12-26 20:11:17 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
254
+ 2024-12-26 20:11:17 | INFO | stdout |
255
+ 2024-12-26 20:11:17 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
256
+ 2024-12-26 20:11:33 | INFO | stdout | Keyboard interruption in main thread... closing server.
257
+ 2024-12-26 20:11:33 | ERROR | stderr | Traceback (most recent call last):
258
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
259
+ 2024-12-26 20:11:33 | ERROR | stderr | time.sleep(0.1)
260
+ 2024-12-26 20:11:33 | ERROR | stderr | KeyboardInterrupt
261
+ 2024-12-26 20:11:33 | ERROR | stderr |
262
+ 2024-12-26 20:11:33 | ERROR | stderr | During handling of the above exception, another exception occurred:
263
+ 2024-12-26 20:11:33 | ERROR | stderr |
264
+ 2024-12-26 20:11:33 | ERROR | stderr | Traceback (most recent call last):
265
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
266
+ 2024-12-26 20:11:33 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
267
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
268
+ 2024-12-26 20:11:33 | ERROR | stderr | self.block_thread()
269
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
270
+ 2024-12-26 20:11:33 | ERROR | stderr | self.server.close()
271
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
272
+ 2024-12-26 20:11:33 | ERROR | stderr | self.thread.join(timeout=5)
273
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
274
+ 2024-12-26 20:11:33 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
275
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
276
+ 2024-12-26 20:11:33 | ERROR | stderr | if lock.acquire(block, timeout):
277
+ 2024-12-26 20:11:33 | ERROR | stderr | KeyboardInterrupt
278
+ 2024-12-26 20:11:33 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
279
+ 2024-12-26 20:11:33 | ERROR | stderr | Traceback (most recent call last):
280
+ 2024-12-26 20:11:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
281
+ 2024-12-26 20:11:33 | ERROR | stderr | lock.acquire()
282
+ 2024-12-26 20:11:33 | ERROR | stderr | KeyboardInterrupt:
283
+ 2024-12-26 20:13:30 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
284
+ 2024-12-26 20:13:30 | INFO | stdout |
285
+ 2024-12-26 20:13:30 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
286
+ 2024-12-26 20:18:33 | INFO | stdout | Keyboard interruption in main thread... closing server.
287
+ 2024-12-26 20:18:33 | ERROR | stderr | Traceback (most recent call last):
288
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
289
+ 2024-12-26 20:18:33 | ERROR | stderr | time.sleep(0.1)
290
+ 2024-12-26 20:18:33 | ERROR | stderr | KeyboardInterrupt
291
+ 2024-12-26 20:18:33 | ERROR | stderr |
292
+ 2024-12-26 20:18:33 | ERROR | stderr | During handling of the above exception, another exception occurred:
293
+ 2024-12-26 20:18:33 | ERROR | stderr |
294
+ 2024-12-26 20:18:33 | ERROR | stderr | Traceback (most recent call last):
295
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
296
+ 2024-12-26 20:18:33 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
297
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
298
+ 2024-12-26 20:18:33 | ERROR | stderr | self.block_thread()
299
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
300
+ 2024-12-26 20:18:33 | ERROR | stderr | self.server.close()
301
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
302
+ 2024-12-26 20:18:33 | ERROR | stderr | self.thread.join(timeout=5)
303
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
304
+ 2024-12-26 20:18:33 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
305
+ 2024-12-26 20:18:33 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
306
+ 2024-12-26 20:18:33 | ERROR | stderr | if lock.acquire(block, timeout):
307
+ 2024-12-26 20:18:33 | ERROR | stderr | KeyboardInterrupt
308
+ 2024-12-26 20:18:40 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
309
+ 2024-12-26 20:18:40 | INFO | stdout |
310
+ 2024-12-26 20:18:40 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
311
+ 2024-12-26 20:23:29 | INFO | stdout | Keyboard interruption in main thread... closing server.
312
+ 2024-12-26 20:23:29 | ERROR | stderr | Traceback (most recent call last):
313
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
314
+ 2024-12-26 20:23:29 | ERROR | stderr | time.sleep(0.1)
315
+ 2024-12-26 20:23:29 | ERROR | stderr | KeyboardInterrupt
316
+ 2024-12-26 20:23:29 | ERROR | stderr |
317
+ 2024-12-26 20:23:29 | ERROR | stderr | During handling of the above exception, another exception occurred:
318
+ 2024-12-26 20:23:29 | ERROR | stderr |
319
+ 2024-12-26 20:23:29 | ERROR | stderr | Traceback (most recent call last):
320
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
321
+ 2024-12-26 20:23:29 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
322
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
323
+ 2024-12-26 20:23:29 | ERROR | stderr | self.block_thread()
324
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
325
+ 2024-12-26 20:23:29 | ERROR | stderr | self.server.close()
326
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
327
+ 2024-12-26 20:23:29 | ERROR | stderr | self.thread.join(timeout=5)
328
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
329
+ 2024-12-26 20:23:29 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
330
+ 2024-12-26 20:23:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
331
+ 2024-12-26 20:23:29 | ERROR | stderr | if lock.acquire(block, timeout):
332
+ 2024-12-26 20:23:29 | ERROR | stderr | KeyboardInterrupt
333
+ 2024-12-26 20:23:33 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
334
+ 2024-12-26 20:23:34 | INFO | stdout |
335
+ 2024-12-26 20:23:34 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
336
+ 2024-12-26 20:24:01 | INFO | stdout | Keyboard interruption in main thread... closing server.
337
+ 2024-12-26 20:24:01 | ERROR | stderr | Traceback (most recent call last):
338
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
339
+ 2024-12-26 20:24:01 | ERROR | stderr | time.sleep(0.1)
340
+ 2024-12-26 20:24:01 | ERROR | stderr | KeyboardInterrupt
341
+ 2024-12-26 20:24:01 | ERROR | stderr |
342
+ 2024-12-26 20:24:01 | ERROR | stderr | During handling of the above exception, another exception occurred:
343
+ 2024-12-26 20:24:01 | ERROR | stderr |
344
+ 2024-12-26 20:24:01 | ERROR | stderr | Traceback (most recent call last):
345
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
346
+ 2024-12-26 20:24:01 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
347
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
348
+ 2024-12-26 20:24:01 | ERROR | stderr | self.block_thread()
349
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
350
+ 2024-12-26 20:24:01 | ERROR | stderr | self.server.close()
351
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
352
+ 2024-12-26 20:24:01 | ERROR | stderr | self.thread.join(timeout=5)
353
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
354
+ 2024-12-26 20:24:01 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
355
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
356
+ 2024-12-26 20:24:01 | ERROR | stderr | if lock.acquire(block, timeout):
357
+ 2024-12-26 20:24:01 | ERROR | stderr | KeyboardInterrupt
358
+ 2024-12-26 20:24:01 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
359
+ 2024-12-26 20:24:01 | ERROR | stderr | Traceback (most recent call last):
360
+ 2024-12-26 20:24:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
361
+ 2024-12-26 20:24:01 | ERROR | stderr | lock.acquire()
362
+ 2024-12-26 20:24:01 | ERROR | stderr | KeyboardInterrupt:
363
+ 2024-12-26 20:25:01 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
364
+ 2024-12-26 20:25:01 | INFO | stdout |
365
+ 2024-12-26 20:25:01 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
366
+ 2024-12-26 20:25:23 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
367
+ 2024-12-26 20:25:35 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
368
+ 2024-12-26 20:25:42 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
369
+ 2024-12-26 20:25:49 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
370
+ 2024-12-26 20:26:02 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
371
+ 2024-12-26 20:26:08 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
372
+ 2024-12-26 20:26:16 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
373
+ 2024-12-26 20:26:22 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
374
+ 2024-12-26 20:26:33 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
375
+ 2024-12-26 20:26:39 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
376
+ 2024-12-26 20:26:52 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
377
+ 2024-12-26 20:27:01 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
378
+ 2024-12-26 20:27:19 | INFO | stdout | Keyboard interruption in main thread... closing server.
379
+ 2024-12-26 20:27:19 | ERROR | stderr | Traceback (most recent call last):
380
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
381
+ 2024-12-26 20:27:19 | ERROR | stderr | time.sleep(0.1)
382
+ 2024-12-26 20:27:19 | ERROR | stderr | KeyboardInterrupt
383
+ 2024-12-26 20:27:19 | ERROR | stderr |
384
+ 2024-12-26 20:27:19 | ERROR | stderr | During handling of the above exception, another exception occurred:
385
+ 2024-12-26 20:27:19 | ERROR | stderr |
386
+ 2024-12-26 20:27:19 | ERROR | stderr | Traceback (most recent call last):
387
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
388
+ 2024-12-26 20:27:19 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
389
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
390
+ 2024-12-26 20:27:19 | ERROR | stderr | self.block_thread()
391
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
392
+ 2024-12-26 20:27:19 | ERROR | stderr | self.server.close()
393
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
394
+ 2024-12-26 20:27:19 | ERROR | stderr | self.thread.join(timeout=5)
395
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
396
+ 2024-12-26 20:27:19 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
397
+ 2024-12-26 20:27:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
398
+ 2024-12-26 20:27:19 | ERROR | stderr | if lock.acquire(block, timeout):
399
+ 2024-12-26 20:27:19 | ERROR | stderr | KeyboardInterrupt
400
+ 2024-12-26 20:27:20 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
401
+ 2024-12-26 20:27:20 | ERROR | stderr | Traceback (most recent call last):
402
+ 2024-12-26 20:27:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
403
+ 2024-12-26 20:27:20 | ERROR | stderr | lock.acquire()
404
+ 2024-12-26 20:27:20 | ERROR | stderr | KeyboardInterrupt:
405
+ 2024-12-26 20:27:58 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
406
+ 2024-12-26 20:27:58 | INFO | stdout |
407
+ 2024-12-26 20:27:58 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
408
+ 2024-12-26 20:28:20 | INFO | stdout | Keyboard interruption in main thread... closing server.
409
+ 2024-12-26 20:28:20 | ERROR | stderr | Traceback (most recent call last):
410
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
411
+ 2024-12-26 20:28:20 | ERROR | stderr | time.sleep(0.1)
412
+ 2024-12-26 20:28:20 | ERROR | stderr | KeyboardInterrupt
413
+ 2024-12-26 20:28:20 | ERROR | stderr |
414
+ 2024-12-26 20:28:20 | ERROR | stderr | During handling of the above exception, another exception occurred:
415
+ 2024-12-26 20:28:20 | ERROR | stderr |
416
+ 2024-12-26 20:28:20 | ERROR | stderr | Traceback (most recent call last):
417
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
418
+ 2024-12-26 20:28:20 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
419
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
420
+ 2024-12-26 20:28:20 | ERROR | stderr | self.block_thread()
421
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
422
+ 2024-12-26 20:28:20 | ERROR | stderr | self.server.close()
423
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
424
+ 2024-12-26 20:28:20 | ERROR | stderr | self.thread.join(timeout=5)
425
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
426
+ 2024-12-26 20:28:20 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
427
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
428
+ 2024-12-26 20:28:20 | ERROR | stderr | if lock.acquire(block, timeout):
429
+ 2024-12-26 20:28:20 | ERROR | stderr | KeyboardInterrupt
430
+ 2024-12-26 20:28:20 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
431
+ 2024-12-26 20:28:20 | ERROR | stderr | Traceback (most recent call last):
432
+ 2024-12-26 20:28:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
433
+ 2024-12-26 20:28:20 | ERROR | stderr | lock.acquire()
434
+ 2024-12-26 20:28:20 | ERROR | stderr | KeyboardInterrupt:
435
+ 2024-12-27 15:58:24 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
436
+ 2024-12-27 15:58:24 | INFO | stdout |
437
+ 2024-12-27 15:58:24 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
438
+ 2024-12-27 15:59:14 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
439
+ 2024-12-27 15:59:25 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
440
+ 2024-12-27 15:59:34 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
441
+ 2024-12-27 15:59:42 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
442
+ 2024-12-27 16:00:57 | INFO | stdout | Keyboard interruption in main thread... closing server.
443
+ 2024-12-27 16:00:57 | INFO | stdout | Rerunning server... use `close()` to stop if you need to change `launch()` parameters.
444
+ 2024-12-27 16:00:57 | INFO | stdout | ----
445
+ 2024-12-27 16:00:57 | ERROR | stderr | Traceback (most recent call last):
446
+ 2024-12-27 16:00:57 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/huggingface/IDEA-Bench-Arena/app.py", line 65, in <module>
447
+ 2024-12-27 16:00:57 | ERROR | stderr | demo.launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
448
+ 2024-12-27 16:00:57 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2619, in launch
449
+ 2024-12-27 16:00:57 | ERROR | stderr | raise ValueError(
450
+ 2024-12-27 16:00:57 | ERROR | stderr | ValueError: When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost.
logs/vote_log/gr_web_image_generation_multi.log.2024-12-25 ADDED
@@ -0,0 +1,797 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-12-24 14:01:42 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
2
+ 2024-12-24 14:01:42 | INFO | stdout |
3
+ 2024-12-24 14:01:42 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
4
+ 2024-12-24 14:02:18 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
5
+ 2024-12-24 14:02:18 | ERROR | stderr | Traceback (most recent call last):
6
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/queueing.py", line 625, in process_events
7
+ 2024-12-24 14:02:18 | ERROR | stderr | response = await route_utils.call_process_api(
8
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/route_utils.py", line 322, in call_process_api
9
+ 2024-12-24 14:02:18 | ERROR | stderr | output = await app.get_blocks().process_api(
10
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2047, in process_api
11
+ 2024-12-24 14:02:18 | ERROR | stderr | result = await self.call_function(
12
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 1594, in call_function
13
+ 2024-12-24 14:02:18 | ERROR | stderr | prediction = await anyio.to_thread.run_sync( # type: ignore
14
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
15
+ 2024-12-24 14:02:18 | ERROR | stderr | return await get_async_backend().run_sync_in_worker_thread(
16
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2505, in run_sync_in_worker_thread
17
+ 2024-12-24 14:02:18 | ERROR | stderr | return await future
18
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 1005, in run
19
+ 2024-12-24 14:02:18 | ERROR | stderr | result = context.run(func, *args)
20
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/utils.py", line 869, in wrapper
21
+ 2024-12-24 14:02:18 | ERROR | stderr | response = f(*args, **kwargs)
22
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 53, in rightvote_last_response_igm
23
+ 2024-12-24 14:02:18 | ERROR | stderr | vote_last_response_igm(
24
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 32, in vote_last_response_igm
25
+ 2024-12-24 14:02:18 | ERROR | stderr | append_json_item_on_log_server(data, get_conv_log_filename())
26
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/utils.py", line 171, in append_json_item_on_log_server
27
+ 2024-12-24 14:02:18 | ERROR | stderr | response = requests.post(url, data={'json_str': json_item, 'file_name': log_file})
28
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/api.py", line 115, in post
29
+ 2024-12-24 14:02:18 | ERROR | stderr | return request("post", url, data=data, json=json, **kwargs)
30
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/api.py", line 59, in request
31
+ 2024-12-24 14:02:18 | ERROR | stderr | return session.request(method=method, url=url, **kwargs)
32
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/sessions.py", line 575, in request
33
+ 2024-12-24 14:02:18 | ERROR | stderr | prep = self.prepare_request(req)
34
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/sessions.py", line 484, in prepare_request
35
+ 2024-12-24 14:02:18 | ERROR | stderr | p.prepare(
36
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/models.py", line 367, in prepare
37
+ 2024-12-24 14:02:18 | ERROR | stderr | self.prepare_url(url, params)
38
+ 2024-12-24 14:02:18 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/models.py", line 438, in prepare_url
39
+ 2024-12-24 14:02:18 | ERROR | stderr | raise MissingSchema(
40
+ 2024-12-24 14:02:18 | ERROR | stderr | requests.exceptions.MissingSchema: Invalid URL '/logs/append_json': No scheme supplied. Perhaps you meant https:///logs/append_json?
41
+ 2024-12-24 14:06:36 | INFO | stdout | Keyboard interruption in main thread... closing server.
42
+ 2024-12-24 14:06:36 | ERROR | stderr | Traceback (most recent call last):
43
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
44
+ 2024-12-24 14:06:36 | ERROR | stderr | time.sleep(0.1)
45
+ 2024-12-24 14:06:36 | ERROR | stderr | KeyboardInterrupt
46
+ 2024-12-24 14:06:36 | ERROR | stderr |
47
+ 2024-12-24 14:06:36 | ERROR | stderr | During handling of the above exception, another exception occurred:
48
+ 2024-12-24 14:06:36 | ERROR | stderr |
49
+ 2024-12-24 14:06:36 | ERROR | stderr | Traceback (most recent call last):
50
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
51
+ 2024-12-24 14:06:36 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
52
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
53
+ 2024-12-24 14:06:36 | ERROR | stderr | self.block_thread()
54
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
55
+ 2024-12-24 14:06:36 | ERROR | stderr | self.server.close()
56
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
57
+ 2024-12-24 14:06:36 | ERROR | stderr | self.thread.join(timeout=5)
58
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
59
+ 2024-12-24 14:06:36 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
60
+ 2024-12-24 14:06:36 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
61
+ 2024-12-24 14:06:36 | ERROR | stderr | if lock.acquire(block, timeout):
62
+ 2024-12-24 14:06:36 | ERROR | stderr | KeyboardInterrupt
63
+ 2024-12-24 14:06:41 | ERROR | stderr | Traceback (most recent call last):
64
+ 2024-12-24 14:06:41 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 77, in <module>
65
+ 2024-12-24 14:06:41 | ERROR | stderr | demo = build_combine_demo(models, elo_results_file, leaderboard_table_file)
66
+ 2024-12-24 14:06:41 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 27, in build_combine_demo
67
+ 2024-12-24 14:06:41 | ERROR | stderr | build_side_by_side_ui_named(models)
68
+ 2024-12-24 14:06:41 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/gradio_web.py", line 226, in build_side_by_side_ui_named
69
+ 2024-12-24 14:06:41 | ERROR | stderr | leftvote_btn = gr.Button(
70
+ 2024-12-24 14:06:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/component_meta.py", line 179, in wrapper
71
+ 2024-12-24 14:06:41 | ERROR | stderr | return fn(self, **kwargs)
72
+ 2024-12-24 14:06:41 | ERROR | stderr | TypeError: Button.__init__() got an unexpected keyword argument 'style'
73
+ 2024-12-24 14:13:32 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
74
+ 2024-12-24 14:13:32 | INFO | stdout |
75
+ 2024-12-24 14:13:32 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
76
+ 2024-12-24 14:14:04 | INFO | stdout | Keyboard interruption in main thread... closing server.
77
+ 2024-12-24 14:14:04 | ERROR | stderr | Traceback (most recent call last):
78
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
79
+ 2024-12-24 14:14:04 | ERROR | stderr | time.sleep(0.1)
80
+ 2024-12-24 14:14:04 | ERROR | stderr | KeyboardInterrupt
81
+ 2024-12-24 14:14:04 | ERROR | stderr |
82
+ 2024-12-24 14:14:04 | ERROR | stderr | During handling of the above exception, another exception occurred:
83
+ 2024-12-24 14:14:04 | ERROR | stderr |
84
+ 2024-12-24 14:14:04 | ERROR | stderr | Traceback (most recent call last):
85
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
86
+ 2024-12-24 14:14:04 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
87
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
88
+ 2024-12-24 14:14:04 | ERROR | stderr | self.block_thread()
89
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
90
+ 2024-12-24 14:14:04 | ERROR | stderr | self.server.close()
91
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
92
+ 2024-12-24 14:14:04 | ERROR | stderr | self.thread.join(timeout=5)
93
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
94
+ 2024-12-24 14:14:04 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
95
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
96
+ 2024-12-24 14:14:04 | ERROR | stderr | if lock.acquire(block, timeout):
97
+ 2024-12-24 14:14:04 | ERROR | stderr | KeyboardInterrupt
98
+ 2024-12-24 14:14:04 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
99
+ 2024-12-24 14:14:04 | ERROR | stderr | Traceback (most recent call last):
100
+ 2024-12-24 14:14:04 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
101
+ 2024-12-24 14:14:04 | ERROR | stderr | lock.acquire()
102
+ 2024-12-24 14:14:04 | ERROR | stderr | KeyboardInterrupt:
103
+ 2024-12-24 14:14:09 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
104
+ 2024-12-24 14:14:09 | INFO | stdout |
105
+ 2024-12-24 14:14:09 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
106
+ 2024-12-24 14:16:39 | INFO | stdout | Keyboard interruption in main thread... closing server.
107
+ 2024-12-24 14:16:39 | ERROR | stderr | Traceback (most recent call last):
108
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
109
+ 2024-12-24 14:16:39 | ERROR | stderr | time.sleep(0.1)
110
+ 2024-12-24 14:16:39 | ERROR | stderr | KeyboardInterrupt
111
+ 2024-12-24 14:16:39 | ERROR | stderr |
112
+ 2024-12-24 14:16:39 | ERROR | stderr | During handling of the above exception, another exception occurred:
113
+ 2024-12-24 14:16:39 | ERROR | stderr |
114
+ 2024-12-24 14:16:39 | ERROR | stderr | Traceback (most recent call last):
115
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
116
+ 2024-12-24 14:16:39 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
117
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
118
+ 2024-12-24 14:16:39 | ERROR | stderr | self.block_thread()
119
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
120
+ 2024-12-24 14:16:39 | ERROR | stderr | self.server.close()
121
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
122
+ 2024-12-24 14:16:39 | ERROR | stderr | self.thread.join(timeout=5)
123
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
124
+ 2024-12-24 14:16:39 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
125
+ 2024-12-24 14:16:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
126
+ 2024-12-24 14:16:39 | ERROR | stderr | if lock.acquire(block, timeout):
127
+ 2024-12-24 14:16:39 | ERROR | stderr | KeyboardInterrupt
128
+ 2024-12-24 14:16:44 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
129
+ 2024-12-24 14:16:44 | INFO | stdout |
130
+ 2024-12-24 14:16:44 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
131
+ 2024-12-24 14:17:37 | INFO | stdout | Keyboard interruption in main thread... closing server.
132
+ 2024-12-24 14:17:38 | ERROR | stderr | Traceback (most recent call last):
133
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
134
+ 2024-12-24 14:17:38 | ERROR | stderr | time.sleep(0.1)
135
+ 2024-12-24 14:17:38 | ERROR | stderr | KeyboardInterrupt
136
+ 2024-12-24 14:17:38 | ERROR | stderr |
137
+ 2024-12-24 14:17:38 | ERROR | stderr | During handling of the above exception, another exception occurred:
138
+ 2024-12-24 14:17:38 | ERROR | stderr |
139
+ 2024-12-24 14:17:38 | ERROR | stderr | Traceback (most recent call last):
140
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
141
+ 2024-12-24 14:17:38 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
142
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
143
+ 2024-12-24 14:17:38 | ERROR | stderr | self.block_thread()
144
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
145
+ 2024-12-24 14:17:38 | ERROR | stderr | self.server.close()
146
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
147
+ 2024-12-24 14:17:38 | ERROR | stderr | self.thread.join(timeout=5)
148
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
149
+ 2024-12-24 14:17:38 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
150
+ 2024-12-24 14:17:38 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
151
+ 2024-12-24 14:17:38 | ERROR | stderr | if lock.acquire(block, timeout):
152
+ 2024-12-24 14:17:38 | ERROR | stderr | KeyboardInterrupt
153
+ 2024-12-24 14:17:39 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
154
+ 2024-12-24 14:17:39 | ERROR | stderr | Traceback (most recent call last):
155
+ 2024-12-24 14:17:39 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
156
+ 2024-12-24 14:17:39 | ERROR | stderr | lock.acquire()
157
+ 2024-12-24 14:17:39 | ERROR | stderr | KeyboardInterrupt:
158
+ 2024-12-24 14:17:44 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
159
+ 2024-12-24 14:17:44 | INFO | stdout |
160
+ 2024-12-24 14:17:44 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
161
+ 2024-12-24 14:17:54 | INFO | stdout | Keyboard interruption in main thread... closing server.
162
+ 2024-12-24 14:17:54 | ERROR | stderr | Traceback (most recent call last):
163
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
164
+ 2024-12-24 14:17:54 | ERROR | stderr | time.sleep(0.1)
165
+ 2024-12-24 14:17:54 | ERROR | stderr | KeyboardInterrupt
166
+ 2024-12-24 14:17:54 | ERROR | stderr |
167
+ 2024-12-24 14:17:54 | ERROR | stderr | During handling of the above exception, another exception occurred:
168
+ 2024-12-24 14:17:54 | ERROR | stderr |
169
+ 2024-12-24 14:17:54 | ERROR | stderr | Traceback (most recent call last):
170
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
171
+ 2024-12-24 14:17:54 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
172
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
173
+ 2024-12-24 14:17:54 | ERROR | stderr | self.block_thread()
174
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
175
+ 2024-12-24 14:17:54 | ERROR | stderr | self.server.close()
176
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
177
+ 2024-12-24 14:17:54 | ERROR | stderr | self.thread.join(timeout=5)
178
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
179
+ 2024-12-24 14:17:54 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
180
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
181
+ 2024-12-24 14:17:54 | ERROR | stderr | if lock.acquire(block, timeout):
182
+ 2024-12-24 14:17:54 | ERROR | stderr | KeyboardInterrupt
183
+ 2024-12-24 14:17:54 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
184
+ 2024-12-24 14:17:54 | ERROR | stderr | Traceback (most recent call last):
185
+ 2024-12-24 14:17:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
186
+ 2024-12-24 14:17:54 | ERROR | stderr | lock.acquire()
187
+ 2024-12-24 14:17:54 | ERROR | stderr | KeyboardInterrupt:
188
+ 2024-12-24 14:18:02 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
189
+ 2024-12-24 14:18:02 | INFO | stdout |
190
+ 2024-12-24 14:18:02 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
191
+ 2024-12-24 14:19:08 | INFO | stdout | Keyboard interruption in main thread... closing server.
192
+ 2024-12-24 14:19:09 | ERROR | stderr | Traceback (most recent call last):
193
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
194
+ 2024-12-24 14:19:09 | ERROR | stderr | time.sleep(0.1)
195
+ 2024-12-24 14:19:09 | ERROR | stderr | KeyboardInterrupt
196
+ 2024-12-24 14:19:09 | ERROR | stderr |
197
+ 2024-12-24 14:19:09 | ERROR | stderr | During handling of the above exception, another exception occurred:
198
+ 2024-12-24 14:19:09 | ERROR | stderr |
199
+ 2024-12-24 14:19:09 | ERROR | stderr | Traceback (most recent call last):
200
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
201
+ 2024-12-24 14:19:09 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
202
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
203
+ 2024-12-24 14:19:09 | ERROR | stderr | self.block_thread()
204
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
205
+ 2024-12-24 14:19:09 | ERROR | stderr | self.server.close()
206
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
207
+ 2024-12-24 14:19:09 | ERROR | stderr | self.thread.join(timeout=5)
208
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
209
+ 2024-12-24 14:19:09 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
210
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
211
+ 2024-12-24 14:19:09 | ERROR | stderr | if lock.acquire(block, timeout):
212
+ 2024-12-24 14:19:09 | ERROR | stderr | KeyboardInterrupt
213
+ 2024-12-24 14:19:09 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
214
+ 2024-12-24 14:19:09 | ERROR | stderr | Traceback (most recent call last):
215
+ 2024-12-24 14:19:09 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
216
+ 2024-12-24 14:19:09 | ERROR | stderr | lock.acquire()
217
+ 2024-12-24 14:19:09 | ERROR | stderr | KeyboardInterrupt:
218
+ 2024-12-24 14:19:14 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
219
+ 2024-12-24 14:19:14 | INFO | stdout |
220
+ 2024-12-24 14:19:14 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
221
+ 2024-12-24 14:19:25 | INFO | stdout | Keyboard interruption in main thread... closing server.
222
+ 2024-12-24 14:19:25 | ERROR | stderr | Traceback (most recent call last):
223
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
224
+ 2024-12-24 14:19:25 | ERROR | stderr | time.sleep(0.1)
225
+ 2024-12-24 14:19:25 | ERROR | stderr | KeyboardInterrupt
226
+ 2024-12-24 14:19:25 | ERROR | stderr |
227
+ 2024-12-24 14:19:25 | ERROR | stderr | During handling of the above exception, another exception occurred:
228
+ 2024-12-24 14:19:25 | ERROR | stderr |
229
+ 2024-12-24 14:19:25 | ERROR | stderr | Traceback (most recent call last):
230
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
231
+ 2024-12-24 14:19:25 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
232
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
233
+ 2024-12-24 14:19:25 | ERROR | stderr | self.block_thread()
234
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
235
+ 2024-12-24 14:19:25 | ERROR | stderr | self.server.close()
236
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
237
+ 2024-12-24 14:19:25 | ERROR | stderr | self.thread.join(timeout=5)
238
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
239
+ 2024-12-24 14:19:25 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
240
+ 2024-12-24 14:19:25 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
241
+ 2024-12-24 14:19:25 | ERROR | stderr | if lock.acquire(block, timeout):
242
+ 2024-12-24 14:19:25 | ERROR | stderr | KeyboardInterrupt
243
+ 2024-12-24 14:19:26 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
244
+ 2024-12-24 14:19:26 | ERROR | stderr | Traceback (most recent call last):
245
+ 2024-12-24 14:19:26 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
246
+ 2024-12-24 14:19:26 | ERROR | stderr | lock.acquire()
247
+ 2024-12-24 14:19:26 | ERROR | stderr | KeyboardInterrupt:
248
+ 2024-12-24 14:19:30 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
249
+ 2024-12-24 14:19:30 | INFO | stdout |
250
+ 2024-12-24 14:19:30 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
251
+ 2024-12-24 14:19:53 | INFO | stdout | Keyboard interruption in main thread... closing server.
252
+ 2024-12-24 14:19:54 | ERROR | stderr | Traceback (most recent call last):
253
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
254
+ 2024-12-24 14:19:54 | ERROR | stderr | time.sleep(0.1)
255
+ 2024-12-24 14:19:54 | ERROR | stderr | KeyboardInterrupt
256
+ 2024-12-24 14:19:54 | ERROR | stderr |
257
+ 2024-12-24 14:19:54 | ERROR | stderr | During handling of the above exception, another exception occurred:
258
+ 2024-12-24 14:19:54 | ERROR | stderr |
259
+ 2024-12-24 14:19:54 | ERROR | stderr | Traceback (most recent call last):
260
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
261
+ 2024-12-24 14:19:54 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
262
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
263
+ 2024-12-24 14:19:54 | ERROR | stderr | self.block_thread()
264
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
265
+ 2024-12-24 14:19:54 | ERROR | stderr | self.server.close()
266
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
267
+ 2024-12-24 14:19:54 | ERROR | stderr | self.thread.join(timeout=5)
268
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
269
+ 2024-12-24 14:19:54 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
270
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
271
+ 2024-12-24 14:19:54 | ERROR | stderr | if lock.acquire(block, timeout):
272
+ 2024-12-24 14:19:54 | ERROR | stderr | KeyboardInterrupt
273
+ 2024-12-24 14:19:54 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
274
+ 2024-12-24 14:19:54 | ERROR | stderr | Traceback (most recent call last):
275
+ 2024-12-24 14:19:54 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
276
+ 2024-12-24 14:19:54 | ERROR | stderr | lock.acquire()
277
+ 2024-12-24 14:19:54 | ERROR | stderr | KeyboardInterrupt:
278
+ 2024-12-24 14:20:25 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
279
+ 2024-12-24 14:20:25 | INFO | stdout |
280
+ 2024-12-24 14:20:25 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
281
+ 2024-12-24 14:25:19 | INFO | stdout | Keyboard interruption in main thread... closing server.
282
+ 2024-12-24 14:25:19 | ERROR | stderr | Traceback (most recent call last):
283
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
284
+ 2024-12-24 14:25:19 | ERROR | stderr | time.sleep(0.1)
285
+ 2024-12-24 14:25:19 | ERROR | stderr | KeyboardInterrupt
286
+ 2024-12-24 14:25:19 | ERROR | stderr |
287
+ 2024-12-24 14:25:19 | ERROR | stderr | During handling of the above exception, another exception occurred:
288
+ 2024-12-24 14:25:19 | ERROR | stderr |
289
+ 2024-12-24 14:25:19 | ERROR | stderr | Traceback (most recent call last):
290
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
291
+ 2024-12-24 14:25:19 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
292
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
293
+ 2024-12-24 14:25:19 | ERROR | stderr | self.block_thread()
294
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
295
+ 2024-12-24 14:25:19 | ERROR | stderr | self.server.close()
296
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
297
+ 2024-12-24 14:25:19 | ERROR | stderr | self.thread.join(timeout=5)
298
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
299
+ 2024-12-24 14:25:19 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
300
+ 2024-12-24 14:25:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
301
+ 2024-12-24 14:25:19 | ERROR | stderr | if lock.acquire(block, timeout):
302
+ 2024-12-24 14:25:19 | ERROR | stderr | KeyboardInterrupt
303
+ 2024-12-24 14:28:13 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
304
+ 2024-12-24 14:28:14 | INFO | stdout |
305
+ 2024-12-24 14:28:14 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
306
+ 2024-12-24 14:33:40 | INFO | stdout | Keyboard interruption in main thread... closing server.
307
+ 2024-12-24 14:33:40 | ERROR | stderr | Traceback (most recent call last):
308
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
309
+ 2024-12-24 14:33:40 | ERROR | stderr | time.sleep(0.1)
310
+ 2024-12-24 14:33:40 | ERROR | stderr | KeyboardInterrupt
311
+ 2024-12-24 14:33:40 | ERROR | stderr |
312
+ 2024-12-24 14:33:40 | ERROR | stderr | During handling of the above exception, another exception occurred:
313
+ 2024-12-24 14:33:40 | ERROR | stderr |
314
+ 2024-12-24 14:33:40 | ERROR | stderr | Traceback (most recent call last):
315
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
316
+ 2024-12-24 14:33:40 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
317
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
318
+ 2024-12-24 14:33:40 | ERROR | stderr | self.block_thread()
319
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
320
+ 2024-12-24 14:33:40 | ERROR | stderr | self.server.close()
321
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
322
+ 2024-12-24 14:33:40 | ERROR | stderr | self.thread.join(timeout=5)
323
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
324
+ 2024-12-24 14:33:40 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
325
+ 2024-12-24 14:33:40 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
326
+ 2024-12-24 14:33:40 | ERROR | stderr | if lock.acquire(block, timeout):
327
+ 2024-12-24 14:33:40 | ERROR | stderr | KeyboardInterrupt
328
+ 2024-12-24 14:33:45 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
329
+ 2024-12-24 14:33:45 | INFO | stdout |
330
+ 2024-12-24 14:33:45 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
331
+ 2024-12-24 14:34:55 | INFO | stdout | Keyboard interruption in main thread... closing server.
332
+ 2024-12-24 14:34:56 | ERROR | stderr | Traceback (most recent call last):
333
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
334
+ 2024-12-24 14:34:56 | ERROR | stderr | time.sleep(0.1)
335
+ 2024-12-24 14:34:56 | ERROR | stderr | KeyboardInterrupt
336
+ 2024-12-24 14:34:56 | ERROR | stderr |
337
+ 2024-12-24 14:34:56 | ERROR | stderr | During handling of the above exception, another exception occurred:
338
+ 2024-12-24 14:34:56 | ERROR | stderr |
339
+ 2024-12-24 14:34:56 | ERROR | stderr | Traceback (most recent call last):
340
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
341
+ 2024-12-24 14:34:56 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
342
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
343
+ 2024-12-24 14:34:56 | ERROR | stderr | self.block_thread()
344
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
345
+ 2024-12-24 14:34:56 | ERROR | stderr | self.server.close()
346
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
347
+ 2024-12-24 14:34:56 | ERROR | stderr | self.thread.join(timeout=5)
348
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
349
+ 2024-12-24 14:34:56 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
350
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
351
+ 2024-12-24 14:34:56 | ERROR | stderr | if lock.acquire(block, timeout):
352
+ 2024-12-24 14:34:56 | ERROR | stderr | KeyboardInterrupt
353
+ 2024-12-24 14:34:56 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
354
+ 2024-12-24 14:34:56 | ERROR | stderr | Traceback (most recent call last):
355
+ 2024-12-24 14:34:56 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
356
+ 2024-12-24 14:34:56 | ERROR | stderr | lock.acquire()
357
+ 2024-12-24 14:34:56 | ERROR | stderr | KeyboardInterrupt:
358
+ 2024-12-24 14:35:01 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
359
+ 2024-12-24 14:35:01 | INFO | stdout |
360
+ 2024-12-24 14:35:01 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
361
+ 2024-12-24 14:35:20 | INFO | stdout | Keyboard interruption in main thread... closing server.
362
+ 2024-12-24 14:35:20 | ERROR | stderr | Traceback (most recent call last):
363
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
364
+ 2024-12-24 14:35:20 | ERROR | stderr | time.sleep(0.1)
365
+ 2024-12-24 14:35:20 | ERROR | stderr | KeyboardInterrupt
366
+ 2024-12-24 14:35:20 | ERROR | stderr |
367
+ 2024-12-24 14:35:20 | ERROR | stderr | During handling of the above exception, another exception occurred:
368
+ 2024-12-24 14:35:20 | ERROR | stderr |
369
+ 2024-12-24 14:35:20 | ERROR | stderr | Traceback (most recent call last):
370
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
371
+ 2024-12-24 14:35:20 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
372
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
373
+ 2024-12-24 14:35:20 | ERROR | stderr | self.block_thread()
374
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
375
+ 2024-12-24 14:35:20 | ERROR | stderr | self.server.close()
376
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
377
+ 2024-12-24 14:35:20 | ERROR | stderr | self.thread.join(timeout=5)
378
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
379
+ 2024-12-24 14:35:20 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
380
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
381
+ 2024-12-24 14:35:20 | ERROR | stderr | if lock.acquire(block, timeout):
382
+ 2024-12-24 14:35:20 | ERROR | stderr | KeyboardInterrupt
383
+ 2024-12-24 14:35:20 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
384
+ 2024-12-24 14:35:20 | ERROR | stderr | Traceback (most recent call last):
385
+ 2024-12-24 14:35:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
386
+ 2024-12-24 14:35:20 | ERROR | stderr | lock.acquire()
387
+ 2024-12-24 14:35:20 | ERROR | stderr | KeyboardInterrupt:
388
+ 2024-12-24 14:35:26 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
389
+ 2024-12-24 14:35:26 | INFO | stdout |
390
+ 2024-12-24 14:35:26 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
391
+ 2024-12-24 14:35:34 | INFO | stdout | Keyboard interruption in main thread... closing server.
392
+ 2024-12-24 14:35:34 | ERROR | stderr | Traceback (most recent call last):
393
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
394
+ 2024-12-24 14:35:34 | ERROR | stderr | time.sleep(0.1)
395
+ 2024-12-24 14:35:34 | ERROR | stderr | KeyboardInterrupt
396
+ 2024-12-24 14:35:34 | ERROR | stderr |
397
+ 2024-12-24 14:35:34 | ERROR | stderr | During handling of the above exception, another exception occurred:
398
+ 2024-12-24 14:35:34 | ERROR | stderr |
399
+ 2024-12-24 14:35:34 | ERROR | stderr | Traceback (most recent call last):
400
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
401
+ 2024-12-24 14:35:34 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
402
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
403
+ 2024-12-24 14:35:34 | ERROR | stderr | self.block_thread()
404
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
405
+ 2024-12-24 14:35:34 | ERROR | stderr | self.server.close()
406
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
407
+ 2024-12-24 14:35:34 | ERROR | stderr | self.thread.join(timeout=5)
408
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
409
+ 2024-12-24 14:35:34 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
410
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
411
+ 2024-12-24 14:35:34 | ERROR | stderr | if lock.acquire(block, timeout):
412
+ 2024-12-24 14:35:34 | ERROR | stderr | KeyboardInterrupt
413
+ 2024-12-24 14:35:34 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
414
+ 2024-12-24 14:35:34 | ERROR | stderr | Traceback (most recent call last):
415
+ 2024-12-24 14:35:34 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
416
+ 2024-12-24 14:35:34 | ERROR | stderr | lock.acquire()
417
+ 2024-12-24 14:35:34 | ERROR | stderr | KeyboardInterrupt:
418
+ 2024-12-24 14:35:39 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
419
+ 2024-12-24 14:35:39 | INFO | stdout |
420
+ 2024-12-24 14:35:39 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
421
+ 2024-12-24 14:36:28 | INFO | stdout | Keyboard interruption in main thread... closing server.
422
+ 2024-12-24 14:36:28 | ERROR | stderr | Traceback (most recent call last):
423
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
424
+ 2024-12-24 14:36:28 | ERROR | stderr | time.sleep(0.1)
425
+ 2024-12-24 14:36:28 | ERROR | stderr | KeyboardInterrupt
426
+ 2024-12-24 14:36:28 | ERROR | stderr |
427
+ 2024-12-24 14:36:28 | ERROR | stderr | During handling of the above exception, another exception occurred:
428
+ 2024-12-24 14:36:28 | ERROR | stderr |
429
+ 2024-12-24 14:36:28 | ERROR | stderr | Traceback (most recent call last):
430
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
431
+ 2024-12-24 14:36:28 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
432
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
433
+ 2024-12-24 14:36:28 | ERROR | stderr | self.block_thread()
434
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
435
+ 2024-12-24 14:36:28 | ERROR | stderr | self.server.close()
436
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
437
+ 2024-12-24 14:36:28 | ERROR | stderr | self.thread.join(timeout=5)
438
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
439
+ 2024-12-24 14:36:28 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
440
+ 2024-12-24 14:36:28 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
441
+ 2024-12-24 14:36:28 | ERROR | stderr | if lock.acquire(block, timeout):
442
+ 2024-12-24 14:36:28 | ERROR | stderr | KeyboardInterrupt
443
+ 2024-12-24 14:36:29 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
444
+ 2024-12-24 14:36:29 | ERROR | stderr | Traceback (most recent call last):
445
+ 2024-12-24 14:36:29 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
446
+ 2024-12-24 14:36:29 | ERROR | stderr | lock.acquire()
447
+ 2024-12-24 14:36:29 | ERROR | stderr | KeyboardInterrupt:
448
+ 2024-12-24 14:36:33 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
449
+ 2024-12-24 14:36:33 | INFO | stdout |
450
+ 2024-12-24 14:36:33 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
451
+ 2024-12-24 14:36:41 | INFO | stdout | Keyboard interruption in main thread... closing server.
452
+ 2024-12-24 14:36:41 | ERROR | stderr | Traceback (most recent call last):
453
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
454
+ 2024-12-24 14:36:41 | ERROR | stderr | time.sleep(0.1)
455
+ 2024-12-24 14:36:41 | ERROR | stderr | KeyboardInterrupt
456
+ 2024-12-24 14:36:41 | ERROR | stderr |
457
+ 2024-12-24 14:36:41 | ERROR | stderr | During handling of the above exception, another exception occurred:
458
+ 2024-12-24 14:36:41 | ERROR | stderr |
459
+ 2024-12-24 14:36:41 | ERROR | stderr | Traceback (most recent call last):
460
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
461
+ 2024-12-24 14:36:41 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
462
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
463
+ 2024-12-24 14:36:41 | ERROR | stderr | self.block_thread()
464
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
465
+ 2024-12-24 14:36:41 | ERROR | stderr | self.server.close()
466
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
467
+ 2024-12-24 14:36:41 | ERROR | stderr | self.thread.join(timeout=5)
468
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
469
+ 2024-12-24 14:36:41 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
470
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
471
+ 2024-12-24 14:36:41 | ERROR | stderr | if lock.acquire(block, timeout):
472
+ 2024-12-24 14:36:41 | ERROR | stderr | KeyboardInterrupt
473
+ 2024-12-24 14:36:41 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
474
+ 2024-12-24 14:36:41 | ERROR | stderr | Traceback (most recent call last):
475
+ 2024-12-24 14:36:41 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
476
+ 2024-12-24 14:36:41 | ERROR | stderr | lock.acquire()
477
+ 2024-12-24 14:36:41 | ERROR | stderr | KeyboardInterrupt:
478
+ 2024-12-24 14:37:47 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
479
+ 2024-12-24 14:37:47 | INFO | stdout |
480
+ 2024-12-24 14:37:47 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
481
+ 2024-12-24 14:40:00 | INFO | stdout | Keyboard interruption in main thread... closing server.
482
+ 2024-12-24 14:40:00 | ERROR | stderr | Traceback (most recent call last):
483
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
484
+ 2024-12-24 14:40:00 | ERROR | stderr | time.sleep(0.1)
485
+ 2024-12-24 14:40:00 | ERROR | stderr | KeyboardInterrupt
486
+ 2024-12-24 14:40:00 | ERROR | stderr |
487
+ 2024-12-24 14:40:00 | ERROR | stderr | During handling of the above exception, another exception occurred:
488
+ 2024-12-24 14:40:00 | ERROR | stderr |
489
+ 2024-12-24 14:40:00 | ERROR | stderr | Traceback (most recent call last):
490
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
491
+ 2024-12-24 14:40:00 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
492
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
493
+ 2024-12-24 14:40:00 | ERROR | stderr | self.block_thread()
494
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
495
+ 2024-12-24 14:40:00 | ERROR | stderr | self.server.close()
496
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
497
+ 2024-12-24 14:40:00 | ERROR | stderr | self.thread.join(timeout=5)
498
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
499
+ 2024-12-24 14:40:00 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
500
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
501
+ 2024-12-24 14:40:00 | ERROR | stderr | if lock.acquire(block, timeout):
502
+ 2024-12-24 14:40:00 | ERROR | stderr | KeyboardInterrupt
503
+ 2024-12-24 14:40:00 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
504
+ 2024-12-24 14:40:00 | ERROR | stderr | Traceback (most recent call last):
505
+ 2024-12-24 14:40:00 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
506
+ 2024-12-24 14:40:00 | ERROR | stderr | lock.acquire()
507
+ 2024-12-24 14:40:00 | ERROR | stderr | KeyboardInterrupt:
508
+ 2024-12-24 14:40:05 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
509
+ 2024-12-24 14:40:05 | INFO | stdout |
510
+ 2024-12-24 14:40:05 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
511
+ 2024-12-24 14:40:16 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
512
+ 2024-12-24 14:40:16 | ERROR | stderr | Traceback (most recent call last):
513
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/queueing.py", line 625, in process_events
514
+ 2024-12-24 14:40:16 | ERROR | stderr | response = await route_utils.call_process_api(
515
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/route_utils.py", line 322, in call_process_api
516
+ 2024-12-24 14:40:16 | ERROR | stderr | output = await app.get_blocks().process_api(
517
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2047, in process_api
518
+ 2024-12-24 14:40:16 | ERROR | stderr | result = await self.call_function(
519
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 1594, in call_function
520
+ 2024-12-24 14:40:16 | ERROR | stderr | prediction = await anyio.to_thread.run_sync( # type: ignore
521
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
522
+ 2024-12-24 14:40:16 | ERROR | stderr | return await get_async_backend().run_sync_in_worker_thread(
523
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2505, in run_sync_in_worker_thread
524
+ 2024-12-24 14:40:16 | ERROR | stderr | return await future
525
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 1005, in run
526
+ 2024-12-24 14:40:16 | ERROR | stderr | result = context.run(func, *args)
527
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/utils.py", line 869, in wrapper
528
+ 2024-12-24 14:40:16 | ERROR | stderr | response = f(*args, **kwargs)
529
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 65, in bothbad_vote_last_response_igm
530
+ 2024-12-24 14:40:16 | ERROR | stderr | vote_last_response_igm(
531
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 27, in vote_last_response_igm
532
+ 2024-12-24 14:40:16 | ERROR | stderr | "models": [x.name for x in states],
533
+ 2024-12-24 14:40:16 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 27, in <listcomp>
534
+ 2024-12-24 14:40:16 | ERROR | stderr | "models": [x.name for x in states],
535
+ 2024-12-24 14:40:16 | ERROR | stderr | AttributeError: 'NoneType' object has no attribute 'name'
536
+ 2024-12-24 14:42:01 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
537
+ 2024-12-24 14:42:01 | ERROR | stderr | Traceback (most recent call last):
538
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/queueing.py", line 625, in process_events
539
+ 2024-12-24 14:42:01 | ERROR | stderr | response = await route_utils.call_process_api(
540
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/route_utils.py", line 322, in call_process_api
541
+ 2024-12-24 14:42:01 | ERROR | stderr | output = await app.get_blocks().process_api(
542
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2047, in process_api
543
+ 2024-12-24 14:42:01 | ERROR | stderr | result = await self.call_function(
544
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 1594, in call_function
545
+ 2024-12-24 14:42:01 | ERROR | stderr | prediction = await anyio.to_thread.run_sync( # type: ignore
546
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
547
+ 2024-12-24 14:42:01 | ERROR | stderr | return await get_async_backend().run_sync_in_worker_thread(
548
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2505, in run_sync_in_worker_thread
549
+ 2024-12-24 14:42:01 | ERROR | stderr | return await future
550
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 1005, in run
551
+ 2024-12-24 14:42:01 | ERROR | stderr | result = context.run(func, *args)
552
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/utils.py", line 869, in wrapper
553
+ 2024-12-24 14:42:01 | ERROR | stderr | response = f(*args, **kwargs)
554
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 41, in leftvote_last_response_igm
555
+ 2024-12-24 14:42:01 | ERROR | stderr | vote_last_response_igm(
556
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/vote_utils.py", line 32, in vote_last_response_igm
557
+ 2024-12-24 14:42:01 | ERROR | stderr | append_json_item_on_log_server(data, get_conv_log_filename())
558
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/serve/utils.py", line 206, in append_json_item_on_log_server
559
+ 2024-12-24 14:42:01 | ERROR | stderr | response = requests.post(url, data={'json_str': json_item, 'file_name': log_file})
560
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/api.py", line 115, in post
561
+ 2024-12-24 14:42:01 | ERROR | stderr | return request("post", url, data=data, json=json, **kwargs)
562
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/api.py", line 59, in request
563
+ 2024-12-24 14:42:01 | ERROR | stderr | return session.request(method=method, url=url, **kwargs)
564
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/sessions.py", line 575, in request
565
+ 2024-12-24 14:42:01 | ERROR | stderr | prep = self.prepare_request(req)
566
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/sessions.py", line 484, in prepare_request
567
+ 2024-12-24 14:42:01 | ERROR | stderr | p.prepare(
568
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/models.py", line 367, in prepare
569
+ 2024-12-24 14:42:01 | ERROR | stderr | self.prepare_url(url, params)
570
+ 2024-12-24 14:42:01 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/requests/models.py", line 438, in prepare_url
571
+ 2024-12-24 14:42:01 | ERROR | stderr | raise MissingSchema(
572
+ 2024-12-24 14:42:01 | ERROR | stderr | requests.exceptions.MissingSchema: Invalid URL '/logs/append_json': No scheme supplied. Perhaps you meant https:///logs/append_json?
573
+ 2024-12-24 15:03:08 | INFO | stdout | Keyboard interruption in main thread... closing server.
574
+ 2024-12-24 15:03:08 | ERROR | stderr | Traceback (most recent call last):
575
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
576
+ 2024-12-24 15:03:08 | ERROR | stderr | time.sleep(0.1)
577
+ 2024-12-24 15:03:08 | ERROR | stderr | KeyboardInterrupt
578
+ 2024-12-24 15:03:08 | ERROR | stderr |
579
+ 2024-12-24 15:03:08 | ERROR | stderr | During handling of the above exception, another exception occurred:
580
+ 2024-12-24 15:03:08 | ERROR | stderr |
581
+ 2024-12-24 15:03:08 | ERROR | stderr | Traceback (most recent call last):
582
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
583
+ 2024-12-24 15:03:08 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
584
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
585
+ 2024-12-24 15:03:08 | ERROR | stderr | self.block_thread()
586
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
587
+ 2024-12-24 15:03:08 | ERROR | stderr | self.server.close()
588
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
589
+ 2024-12-24 15:03:08 | ERROR | stderr | self.thread.join(timeout=5)
590
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
591
+ 2024-12-24 15:03:08 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
592
+ 2024-12-24 15:03:08 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
593
+ 2024-12-24 15:03:08 | ERROR | stderr | if lock.acquire(block, timeout):
594
+ 2024-12-24 15:03:08 | ERROR | stderr | KeyboardInterrupt
595
+ 2024-12-24 15:03:13 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
596
+ 2024-12-24 15:03:13 | INFO | stdout |
597
+ 2024-12-24 15:03:13 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
598
+ 2024-12-24 15:04:38 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
599
+ 2024-12-24 15:05:17 | INFO | stdout | Keyboard interruption in main thread... closing server.
600
+ 2024-12-24 15:05:17 | ERROR | stderr | Traceback (most recent call last):
601
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
602
+ 2024-12-24 15:05:17 | ERROR | stderr | time.sleep(0.1)
603
+ 2024-12-24 15:05:17 | ERROR | stderr | KeyboardInterrupt
604
+ 2024-12-24 15:05:17 | ERROR | stderr |
605
+ 2024-12-24 15:05:17 | ERROR | stderr | During handling of the above exception, another exception occurred:
606
+ 2024-12-24 15:05:17 | ERROR | stderr |
607
+ 2024-12-24 15:05:17 | ERROR | stderr | Traceback (most recent call last):
608
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
609
+ 2024-12-24 15:05:17 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
610
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
611
+ 2024-12-24 15:05:17 | ERROR | stderr | self.block_thread()
612
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
613
+ 2024-12-24 15:05:17 | ERROR | stderr | self.server.close()
614
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
615
+ 2024-12-24 15:05:17 | ERROR | stderr | self.thread.join(timeout=5)
616
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
617
+ 2024-12-24 15:05:17 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
618
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
619
+ 2024-12-24 15:05:17 | ERROR | stderr | if lock.acquire(block, timeout):
620
+ 2024-12-24 15:05:17 | ERROR | stderr | KeyboardInterrupt
621
+ 2024-12-24 15:05:17 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
622
+ 2024-12-24 15:05:17 | ERROR | stderr | Traceback (most recent call last):
623
+ 2024-12-24 15:05:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
624
+ 2024-12-24 15:05:17 | ERROR | stderr | lock.acquire()
625
+ 2024-12-24 15:05:17 | ERROR | stderr | KeyboardInterrupt:
626
+ 2024-12-24 15:07:04 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
627
+ 2024-12-24 15:07:04 | INFO | stdout |
628
+ 2024-12-24 15:07:04 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
629
+ 2024-12-24 15:07:09 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
630
+ 2024-12-24 15:08:19 | INFO | stdout | Keyboard interruption in main thread... closing server.
631
+ 2024-12-24 15:08:19 | ERROR | stderr | Traceback (most recent call last):
632
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
633
+ 2024-12-24 15:08:19 | ERROR | stderr | time.sleep(0.1)
634
+ 2024-12-24 15:08:19 | ERROR | stderr | KeyboardInterrupt
635
+ 2024-12-24 15:08:19 | ERROR | stderr |
636
+ 2024-12-24 15:08:19 | ERROR | stderr | During handling of the above exception, another exception occurred:
637
+ 2024-12-24 15:08:19 | ERROR | stderr |
638
+ 2024-12-24 15:08:19 | ERROR | stderr | Traceback (most recent call last):
639
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
640
+ 2024-12-24 15:08:19 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
641
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
642
+ 2024-12-24 15:08:19 | ERROR | stderr | self.block_thread()
643
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
644
+ 2024-12-24 15:08:19 | ERROR | stderr | self.server.close()
645
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
646
+ 2024-12-24 15:08:19 | ERROR | stderr | self.thread.join(timeout=5)
647
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
648
+ 2024-12-24 15:08:19 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
649
+ 2024-12-24 15:08:19 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
650
+ 2024-12-24 15:08:19 | ERROR | stderr | if lock.acquire(block, timeout):
651
+ 2024-12-24 15:08:19 | ERROR | stderr | KeyboardInterrupt
652
+ 2024-12-24 15:08:20 | ERROR | stderr | Exception ignored in: <module 'threading' from '/opt/anaconda3/envs/tiger/lib/python3.10/threading.py'>
653
+ 2024-12-24 15:08:20 | ERROR | stderr | Traceback (most recent call last):
654
+ 2024-12-24 15:08:20 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1567, in _shutdown
655
+ 2024-12-24 15:08:20 | ERROR | stderr | lock.acquire()
656
+ 2024-12-24 15:08:20 | ERROR | stderr | KeyboardInterrupt:
657
+ 2024-12-24 15:12:22 | INFO | stdout | {'t2i_generation': PosixPath('arena_elo/results/latest/elo_results_t2i_generation.pkl'), 'video_generation': PosixPath('arena_elo/results/latest/elo_results_video_generation.pkl'), 'image_editing': PosixPath('arena_elo/results/latest/elo_results_image_editing.pkl')}
658
+ 2024-12-24 15:12:22 | INFO | stdout | {'t2i_generation': PosixPath('arena_elo/results/latest/t2i_generation_leaderboard.csv'), 'video_generation': PosixPath('arena_elo/results/latest/video_generation_leaderboard.csv'), 'image_editing': PosixPath('arena_elo/results/latest/image_editing_leaderboard.csv')}
659
+ 2024-12-24 15:15:46 | ERROR | stderr | Traceback (most recent call last):
660
+ 2024-12-24 15:15:46 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 79, in <module>
661
+ 2024-12-24 15:15:46 | ERROR | stderr | input()
662
+ 2024-12-24 15:15:46 | ERROR | stderr | KeyboardInterrupt
663
+ 2024-12-24 15:26:57 | INFO | stdout | arena_elo/results/latest/t2i_generation_leaderboard.csv
664
+ 2024-12-24 15:26:57 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
665
+ 2024-12-24 15:26:57 | INFO | stdout |
666
+ 2024-12-24 15:26:57 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
667
+ 2024-12-24 15:56:17 | INFO | stdout | Keyboard interruption in main thread... closing server.
668
+ 2024-12-24 15:56:17 | ERROR | stderr | Traceback (most recent call last):
669
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
670
+ 2024-12-24 15:56:17 | ERROR | stderr | time.sleep(0.1)
671
+ 2024-12-24 15:56:17 | ERROR | stderr | KeyboardInterrupt
672
+ 2024-12-24 15:56:17 | ERROR | stderr |
673
+ 2024-12-24 15:56:17 | ERROR | stderr | During handling of the above exception, another exception occurred:
674
+ 2024-12-24 15:56:17 | ERROR | stderr |
675
+ 2024-12-24 15:56:17 | ERROR | stderr | Traceback (most recent call last):
676
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
677
+ 2024-12-24 15:56:17 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
678
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
679
+ 2024-12-24 15:56:17 | ERROR | stderr | self.block_thread()
680
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
681
+ 2024-12-24 15:56:17 | ERROR | stderr | self.server.close()
682
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
683
+ 2024-12-24 15:56:17 | ERROR | stderr | self.thread.join(timeout=5)
684
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
685
+ 2024-12-24 15:56:17 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
686
+ 2024-12-24 15:56:17 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
687
+ 2024-12-24 15:56:17 | ERROR | stderr | if lock.acquire(block, timeout):
688
+ 2024-12-24 15:56:17 | ERROR | stderr | KeyboardInterrupt
689
+ 2024-12-24 16:52:26 | INFO | stdout | arena_elo/results/latest/t2i_generation_leaderboard.csv
690
+ 2024-12-24 16:52:27 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
691
+ 2024-12-24 16:52:27 | INFO | stdout |
692
+ 2024-12-24 16:52:27 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
693
+ 2024-12-24 16:53:47 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
694
+ 2024-12-24 16:54:12 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
695
+ 2024-12-24 16:54:24 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
696
+ 2024-12-24 16:54:36 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
697
+ 2024-12-24 16:54:55 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
698
+ 2024-12-24 16:55:03 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
699
+ 2024-12-24 16:55:11 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
700
+ 2024-12-24 16:55:45 | INFO | stdout | Keyboard interruption in main thread... closing server.
701
+ 2024-12-24 16:55:46 | ERROR | stderr | Traceback (most recent call last):
702
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
703
+ 2024-12-24 16:55:46 | ERROR | stderr | time.sleep(0.1)
704
+ 2024-12-24 16:55:46 | ERROR | stderr | KeyboardInterrupt
705
+ 2024-12-24 16:55:46 | ERROR | stderr |
706
+ 2024-12-24 16:55:46 | ERROR | stderr | During handling of the above exception, another exception occurred:
707
+ 2024-12-24 16:55:46 | ERROR | stderr |
708
+ 2024-12-24 16:55:46 | ERROR | stderr | Traceback (most recent call last):
709
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
710
+ 2024-12-24 16:55:46 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
711
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
712
+ 2024-12-24 16:55:46 | ERROR | stderr | self.block_thread()
713
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
714
+ 2024-12-24 16:55:46 | ERROR | stderr | self.server.close()
715
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
716
+ 2024-12-24 16:55:46 | ERROR | stderr | self.thread.join(timeout=5)
717
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
718
+ 2024-12-24 16:55:46 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
719
+ 2024-12-24 16:55:46 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
720
+ 2024-12-24 16:55:46 | ERROR | stderr | if lock.acquire(block, timeout):
721
+ 2024-12-24 16:55:46 | ERROR | stderr | KeyboardInterrupt
722
+ 2024-12-24 17:57:24 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
723
+ 2024-12-24 17:57:24 | INFO | stdout |
724
+ 2024-12-24 17:57:24 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
725
+ 2024-12-24 17:57:39 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
726
+ 2024-12-24 17:57:55 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
727
+ 2024-12-24 17:58:04 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
728
+ 2024-12-24 17:58:13 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
729
+ 2024-12-24 17:58:23 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
730
+ 2024-12-24 17:58:34 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
731
+ 2024-12-24 17:58:46 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
732
+ 2024-12-24 17:58:51 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
733
+ 2024-12-24 17:58:56 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
734
+ 2024-12-24 17:59:11 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
735
+ 2024-12-24 17:59:26 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
736
+ 2024-12-24 17:59:40 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
737
+ 2024-12-24 17:59:44 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
738
+ 2024-12-24 17:59:49 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
739
+ 2024-12-24 18:00:05 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
740
+ 2024-12-24 18:00:12 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
741
+ 2024-12-24 18:00:19 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
742
+ 2024-12-24 18:00:25 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
743
+ 2024-12-24 18:00:32 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
744
+ 2024-12-24 18:00:46 | INFO | stdout | Keyboard interruption in main thread... closing server.
745
+ 2024-12-24 18:00:47 | ERROR | stderr | Traceback (most recent call last):
746
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
747
+ 2024-12-24 18:00:47 | ERROR | stderr | time.sleep(0.1)
748
+ 2024-12-24 18:00:47 | ERROR | stderr | KeyboardInterrupt
749
+ 2024-12-24 18:00:47 | ERROR | stderr |
750
+ 2024-12-24 18:00:47 | ERROR | stderr | During handling of the above exception, another exception occurred:
751
+ 2024-12-24 18:00:47 | ERROR | stderr |
752
+ 2024-12-24 18:00:47 | ERROR | stderr | Traceback (most recent call last):
753
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 78, in <module>
754
+ 2024-12-24 18:00:47 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
755
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
756
+ 2024-12-24 18:00:47 | ERROR | stderr | self.block_thread()
757
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
758
+ 2024-12-24 18:00:47 | ERROR | stderr | self.server.close()
759
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
760
+ 2024-12-24 18:00:47 | ERROR | stderr | self.thread.join(timeout=5)
761
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
762
+ 2024-12-24 18:00:47 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
763
+ 2024-12-24 18:00:47 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
764
+ 2024-12-24 18:00:47 | ERROR | stderr | if lock.acquire(block, timeout):
765
+ 2024-12-24 18:00:47 | ERROR | stderr | KeyboardInterrupt
766
+ 2024-12-25 10:12:09 | INFO | stdout | * Running on local URL: http://127.0.0.1:7860
767
+ 2024-12-25 10:12:09 | INFO | stdout |
768
+ 2024-12-25 10:12:09 | INFO | stdout | To create a public link, set `share=True` in `launch()`.
769
+ 2024-12-25 10:12:41 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
770
+ 2024-12-25 10:12:54 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
771
+ 2024-12-25 10:13:17 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
772
+ 2024-12-25 10:13:24 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
773
+ 2024-12-25 10:13:30 | INFO | gradio_web_server_image_generation_multi | leftvote (named). ip: 127.0.0.1
774
+ 2024-12-25 10:18:33 | INFO | gradio_web_server_image_generation_multi | rightvote (named). ip: 127.0.0.1
775
+ 2024-12-25 10:18:53 | INFO | gradio_web_server_image_generation_multi | bothbad_vote (named). ip: 127.0.0.1
776
+ 2024-12-25 10:25:51 | INFO | stdout | Keyboard interruption in main thread... closing server.
777
+ 2024-12-25 10:25:52 | ERROR | stderr | Traceback (most recent call last):
778
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2869, in block_thread
779
+ 2024-12-25 10:25:52 | ERROR | stderr | time.sleep(0.1)
780
+ 2024-12-25 10:25:52 | ERROR | stderr | KeyboardInterrupt
781
+ 2024-12-25 10:25:52 | ERROR | stderr |
782
+ 2024-12-25 10:25:52 | ERROR | stderr | During handling of the above exception, another exception occurred:
783
+ 2024-12-25 10:25:52 | ERROR | stderr |
784
+ 2024-12-25 10:25:52 | ERROR | stderr | Traceback (most recent call last):
785
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/Users/jasi/Documents/ali-vilab/modelscope/IDEA-Bench-Arena/app.py", line 64, in <module>
786
+ 2024-12-25 10:25:52 | ERROR | stderr | demo.queue(max_size=20).launch(server_port=server_port, root_path=ROOT_PATH, show_error=True)
787
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2774, in launch
788
+ 2024-12-25 10:25:52 | ERROR | stderr | self.block_thread()
789
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/blocks.py", line 2873, in block_thread
790
+ 2024-12-25 10:25:52 | ERROR | stderr | self.server.close()
791
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/site-packages/gradio/http_server.py", line 69, in close
792
+ 2024-12-25 10:25:52 | ERROR | stderr | self.thread.join(timeout=5)
793
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1100, in join
794
+ 2024-12-25 10:25:52 | ERROR | stderr | self._wait_for_tstate_lock(timeout=max(timeout, 0))
795
+ 2024-12-25 10:25:52 | ERROR | stderr | File "/opt/anaconda3/envs/tiger/lib/python3.10/threading.py", line 1116, in _wait_for_tstate_lock
796
+ 2024-12-25 10:25:52 | ERROR | stderr | if lock.acquire(block, timeout):
797
+ 2024-12-25 10:25:52 | ERROR | stderr | KeyboardInterrupt