dlimeng committed on
Commit f3ffe2b · 1 Parent(s): fdf4060

Update app.py

Files changed (1):
  1. app.py +10 -8
app.py CHANGED
@@ -2,28 +2,32 @@ import gradio as gr
 import matplotlib.pyplot as plt
 import io
 import numpy as np
-import base64
 from PIL import Image
 import requests
 import json
+import re

 # Convert the image to base64 so it can be displayed in gradio
-def get_image_data(plt):
+def get_image_data(fig):
     buf = io.BytesIO()
-    plt.savefig(buf, format='PNG')
+    fig.savefig(buf, format='PNG')
     buf.seek(0)
     img = Image.open(buf)
     return img

 # Execute the Python code and generate an image
 def execute_code(code):
-    exec(code)
-    return get_image_data(plt)
+    namespace = {}
+    exec(code, namespace)
+    fig = namespace.get('fig')  # Assume the code generates a matplotlib figure named 'fig'
+    if fig:
+        return get_image_data(fig)
+    else:
+        raise ValueError("The code did not generate a matplotlib figure named 'fig'")

 def gpt_inference(base_url, model, openai_key, prompt):

     newprompt = f'Write Python code that does the following: \n\n{prompt}\n\nNote, the code is going to be executed in a Jupyter Python kernel.\n\nLast instruction, and this is the most important, just return code. No other outputs, as your full response will directly be executed in the kernel.'
-

     data = {
         "model": model,
@@ -41,9 +45,7 @@ def gpt_inference(base_url, model, openai_key, prompt):
         "Authorization": f"Bearer {openai_key}",
     }

-
     response = requests.post(f"{base_url}/v1/chat/completions", headers=headers, data=json.dumps(data))
-    print(f"text: {response.text}")

 def extract_code(text):
     # Match triple backtick blocks first
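The hunk ends where extract_code begins, so its body is not shown; the only hints are the comment about triple backtick blocks and the newly added import re. A minimal sketch of what such a helper could look like, assuming it simply strips Markdown code fences from the model's reply (the real body in app.py may differ):

import re

def extract_code(text):
    # Match triple backtick blocks first, with or without a language tag.
    match = re.search(r"```(?:\w+)?\s*(.*?)```", text, re.DOTALL)
    if match:
        return match.group(1).strip()
    # Otherwise treat the whole reply as code.
    return text.strip()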
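Likewise, the diff stops right after the requests.post call (the debug print of response.text is dropped), so how gpt_inference consumes the response is not visible here. A hedged sketch of that continuation, assuming the standard Chat Completions response shape:

    # Illustrative continuation inside gpt_inference; not part of this hunk.
    result = response.json()
    reply = result["choices"][0]["message"]["content"]
    code = extract_code(reply)
    return execute_code(code)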
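Finally, the new execute_code contract can be exercised on its own. A small illustrative example (not part of app.py) in which the executed snippet assigns a matplotlib figure to a variable named fig, as the updated function now requires:

snippet = """
import matplotlib.pyplot as plt
import numpy as np

x = np.linspace(0, 2 * np.pi, 100)
fig, ax = plt.subplots()
ax.plot(x, np.sin(x))
ax.set_title("sin(x)")
"""

img = execute_code(snippet)  # returns a PIL Image built from fig by get_image_data
img.save("sine.png")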