ndurner committed
Commit 99d0cac
1 Parent(s): 0e1ed46

image export/import

Files changed (2):
  1. app.py +59 -7
  2. llm.py +3 -0
app.py CHANGED
@@ -1,7 +1,9 @@
 import gradio as gr
 import json
-import os
+import io
 import boto3
+import base64
+from PIL import Image
 
 from settings_mgr import generate_download_settings_js, generate_upload_settings_js
 from llm import LLM, log_to_console
@@ -86,12 +88,33 @@ def import_history(history, file):
     # Check if 'history' key exists for backward compatibility
     if 'history' in import_data:
         history = import_data['history']
-        system_prompt.value = import_data.get('system_prompt', '')  # Set default if not present
+        system_prompt_value = import_data.get('system_prompt', '')  # Set default if not present
     else:
         # Assume it's an old format with only history data
         history = import_data
+        system_prompt_value = ''
 
-    return history, system_prompt.value  # Return system prompt value to be set in the UI
+    # Process the history to handle image data
+    processed_history = []
+    for pair in history:
+        processed_pair = []
+        for message in pair:
+            if isinstance(message, dict) and 'file' in message and 'data' in message['file']:
+                # Create a gradio.Image from the base64 data
+                image_data = base64.b64decode(message['file']['data'].split(',')[1])
+                img = Image.open(io.BytesIO(image_data))
+                gr_image = gr.Image(img)
+                processed_pair.append(gr_image)
+
+                gr.Warning("Reusing images across sessions is limited to one conversation turn")
+            else:
+                processed_pair.append(message)
+        processed_history.append(processed_pair)
+
+    return processed_history, system_prompt_value
+
+def export_history(h, s):
+    pass
 
 with gr.Blocks(delete_cache=(86400, 86400)) as demo:
     gr.Markdown("# Amazon™️ Bedrock™️ Chat™️ (Nils' Version™️) feat. Mistral™️ AI & Anthropic™️ Claude™️")
@@ -171,10 +194,39 @@ with gr.Blocks(delete_cache=(86400, 86400)) as demo:
     with gr.Accordion("Import/Export", open = False):
         import_button = gr.UploadButton("History Import")
         export_button = gr.Button("History Export")
-        export_button.click(lambda: None, [chatbot, system_prompt], js="""
-            (chat_history, system_prompt) => {
+        export_button.click(export_history, [chatbot, system_prompt], js="""
+            async (chat_history, system_prompt) => {
+                console.log('Chat History:', JSON.stringify(chat_history, null, 2));
+
+                async function fetchAndEncodeImage(url) {
+                    const response = await fetch(url);
+                    const blob = await response.blob();
+                    return new Promise((resolve, reject) => {
+                        const reader = new FileReader();
+                        reader.onloadend = () => resolve(reader.result);
+                        reader.onerror = reject;
+                        reader.readAsDataURL(blob);
+                    });
+                }
+
+                const processedHistory = await Promise.all(chat_history.map(async (pair) => {
+                    return await Promise.all(pair.map(async (message) => {
+                        if (message && message.file && message.file.url) {
+                            const base64Image = await fetchAndEncodeImage(message.file.url);
+                            return {
+                                ...message,
+                                file: {
+                                    ...message.file,
+                                    data: base64Image
+                                }
+                            };
+                        }
+                        return message;
+                    }));
+                }));
+
                 const export_data = {
-                    history: chat_history,
+                    history: processedHistory,
                     system_prompt: system_prompt
                 };
                 const history_json = JSON.stringify(export_data);
@@ -188,7 +240,7 @@ with gr.Blocks(delete_cache=(86400, 86400)) as demo:
                 document.body.removeChild(a);
                 URL.revokeObjectURL(url);
             }
-        """)
+        """)
         dl_button = gr.Button("File download")
        dl_button.click(lambda: None, [chatbot], js="""
             (chat_history) => {
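
For reference, a minimal Python sketch (not part of the commit) of the round-trip these app.py changes implement, assuming the exported JSON has the shape the export JS above produces, i.e. each image message carries file.data as a "data:image/...;base64,..." URL. The helper names encode_image_message and decode_image_message are illustrative only.

import base64
import io

from PIL import Image

def encode_image_message(path: str) -> dict:
    # Stand-in for the browser-side FileReader.readAsDataURL step in the export JS:
    # read the file and wrap it as a base64 data URL under file.data.
    # A PNG MIME type is assumed here; import_history never inspects the prefix.
    with open(path, "rb") as f:
        data_url = "data:image/png;base64," + base64.b64encode(f.read()).decode("ascii")
    return {"file": {"data": data_url}}

def decode_image_message(message: dict) -> Image.Image:
    # Mirrors import_history: drop the data-URL prefix, base64-decode the payload,
    # and re-open it as a PIL image (which import_history then wraps in gr.Image).
    payload = message["file"]["data"].split(",")[1]
    return Image.open(io.BytesIO(base64.b64decode(payload)))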
llm.py CHANGED
@@ -13,6 +13,7 @@ import re
 from PIL import Image
 import io
 import math
+import gradio
 
 # constants
 log_to_console = False
@@ -41,6 +42,8 @@ class LLM:
 
             if isinstance(human, tuple):
                 user_msg_parts.extend(self._process_file(human[0]))
+            elif isinstance(human, gradio.Image):
+                user_msg_parts.extend(self._process_file(human.value["path"]))
             else:
                 user_msg_parts.extend([{"text": human}])
 
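
A small, illustrative sketch (not part of the commit) of the dispatch this llm.py hunk adds. The function name message_parts_for is hypothetical, and process_file stands in for the class's existing _process_file(path) helper that turns a file path into message parts.

import gradio

def message_parts_for(human, process_file):
    # Uploaded files arrive from Gradio chat history as (path,) tuples.
    if isinstance(human, tuple):
        return process_file(human[0])
    # Images re-hydrated by import_history arrive as gr.Image components;
    # per the hunk above, the temp-file path sits in the component's value dict.
    if isinstance(human, gradio.Image):
        return process_file(human.value["path"])
    # Anything else is treated as a plain text turn.
    return [{"text": human}]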