Update app.py
app.py
CHANGED
@@ -31,28 +31,29 @@ object_labels = [
 EXAMPLE_IMAGE_URL = "https://www.watercoloraffair.com/wp-content/uploads/2023/04/monet-houses-of-parliament-low-key.jpg"  # Square example image
 example_image = Image.open(BytesIO(requests.get(EXAMPLE_IMAGE_URL).content))
 
-# Updated process_chat function
 def process_chat(user_text):
     if not user_text.strip():
-        yield "⚠️ Please enter a question."
+        yield "⚠️ Please enter a valid question."
         return
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        # Streaming responses from OpenAI API
+        response = openai.Chat.create(
+            model="gpt-4",
+            messages=[
+                {"role": "system", "content": "You are a helpful assistant named Diane specializing in digital art advice."},
+                {"role": "user", "content": user_text},
+            ],
+            stream=True  # Enable streaming
+        )
+        response_text = ""
+        for chunk in response:
+            if chunk.get("choices") and chunk["choices"][0].get("delta", {}).get("content"):
+                token = chunk["choices"][0]["delta"]["content"]
+                response_text += token
+                yield response_text
+    except Exception as e:
+        yield f"❌ An error occurred: {str(e)}"
 
 
 # Function to analyze image contrast
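A note on the added block: `openai.Chat.create` is not an attribute of the `openai` Python package, so `process_chat` as committed would raise an AttributeError on the first request. The sketch below shows one way the same streaming generator could be written against the pre-1.0 `openai` client, whose `openai.ChatCompletion.create(..., stream=True)` call yields dict-style chunks matching the `chunk["choices"][0]["delta"]` access pattern used in the diff. The `OPENAI_API_KEY` environment variable is an assumption; none of this is taken from the rest of app.py.

import os
import openai

openai.api_key = os.environ.get("OPENAI_API_KEY")  # assumed to be provided via the Space's secrets

def process_chat(user_text):
    if not user_text.strip():
        yield "⚠️ Please enter a valid question."
        return
    try:
        # Ask for a streamed completion; with stream=True the pre-1.0 client
        # returns an iterator of chunks rather than a single response object.
        response = openai.ChatCompletion.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": "You are a helpful assistant named Diane specializing in digital art advice."},
                {"role": "user", "content": user_text},
            ],
            stream=True,
        )
        response_text = ""
        for chunk in response:
            # Each chunk carries a partial "delta"; append whatever content it holds.
            delta = chunk["choices"][0].get("delta", {})
            if delta.get("content"):
                response_text += delta["content"]
                yield response_text  # yield the growing reply so the UI can update in place
    except Exception as e:
        yield f"❌ An error occurred: {e}"

If the Space is a Gradio app (which the generator-style handler suggests), yielding the accumulated string rather than individual tokens is the usual pattern: each yielded value replaces the previous output, so the reply appears to stream into the textbox. On openai>=1.0 the equivalent call would be `client.chat.completions.create(...)` on an `OpenAI()` client, with chunks read as attributes (`chunk.choices[0].delta.content`) instead of dict keys.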