[update] add list assistant files

Changed files:
- examples/test/test_assistant/assistant.py  +0 -100
- examples/test/test_assistant/test_assistant.py  +0 -64
- examples/test/test_assistant/test_assistant_chat.py  +73 -0
- examples/test/test_assistant/test_assistant_delete.py  +42 -0
- examples/test/test_assistant/test_assistant_file_list.py  +65 -0
- examples/test/test_assistant/test_assistant_list.py  +87 -0
- examples/test/test_assistant/test_assistant_retrieval.py  +79 -0
- examples/test/test_assistant/test_asssitant_file_delete.py  +45 -0
- examples/test/test_tts/test_tts.py  +3 -2
- main.py  +406 -27
examples/test/test_assistant/assistant.py
DELETED
@@ -1,100 +0,0 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import json

from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def get_message_list(client: OpenAI, thread_id: str):
    messages: SyncCursorPage[ThreadMessage] = client.beta.threads.messages.list(
        thread_id=thread_id
    )
    result = list()
    for message in messages.data:

        content = list()
        for msg in message.content:
            content.append({
                "text": {
                    "annotations": msg.text.annotations,
                    "value": msg.text.value,
                },
                "type": msg.type,
            })

        result.append({
            "id": message.id,
            "assistant_id": message.assistant_id,
            "content": content,
            "created_at": message.created_at,
            "file_ids": message.file_ids,
            "metadata": message.metadata,
            "object": message.object,
            "role": message.role,
            "run_id": message.run_id,
            "thread_id": message.thread_id,
        })

    return result


def get_assistant_list(client: OpenAI):
    assistant_list = client.beta.assistants.list()

    result = list()
    for assistant in assistant_list.data:

        tools = list()
        for tool in assistant.tools:
            tools.append({
                "type": tool.type
            })

        result.append({
            "id": assistant.id,
            "created_at": assistant.created_at,
            "description": assistant.description,
            "file_ids": assistant.file_ids,
            "instructions": assistant.instructions,
            "metadata": assistant.metadata,
            "model": assistant.model,
            "name": assistant.name,
        })
    return result


def main():
    args = get_args()

    client = OpenAI()

    message_list = get_message_list(client, thread_id="thread_HT89XoJP7ZoL4g2s0CWl01Oo")
    message_list = json.dumps(message_list, indent=4, ensure_ascii=False)
    print(message_list)

    # assistant_list = get_assistant_list(client)
    # assistant_list = json.dumps(assistant_list, indent=4, ensure_ascii=False)
    # print(assistant_list)
    return


if __name__ == '__main__':
    main()
examples/test/test_assistant/test_assistant.py
DELETED
@@ -1,64 +0,0 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://platform.openai.com/docs/assistants/overview
"""
import argparse

from openai import OpenAI

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


client = OpenAI()


assistant = client.beta.assistants.create(
    name="Math Tutor",
    instructions="You are a personal math tutor. Write and run code to answer math questions.",
    tools=[{"type": "code_interpreter"}],
    model="gpt-4-1106-preview"
)

thread = client.beta.threads.create()

message = client.beta.threads.messages.create(
    thread_id=thread.id,
    role="user",
    content="I need to solve the equation `3x + 11 = 14`. Can you help me?"
)
print(message)

run = client.beta.threads.runs.create(
    thread_id=thread.id,
    assistant_id=assistant.id,
    instructions="Please address the user as Jane Doe. The user has a premium account."
)

run = client.beta.threads.runs.retrieve(
    thread_id=thread.id,
    run_id=run.id
)

messages = client.beta.threads.messages.list(
    thread_id=thread.id
)
print(messages)

# ThreadMessage(id='msg_7DjZts4XzdQw4IQvPYg3zJYS', assistant_id=None, content=[MessageContentText(text=Text(annotations=[], value='I need to solve the equation `3x + 11 = 14`. Can you help me?'), type='text')], created_at=1699425801, file_ids=[], metadata={}, object='thread.message', role='user', run_id=None, thread_id='thread_HT89XoJP7ZoL4g2s0CWl01Oo')
# SyncCursorPage[ThreadMessage](data=[ThreadMessage(id='msg_7DjZts4XzdQw4IQvPYg3zJYS', assistant_id=None, content=[MessageContentText(text=Text(annotations=[], value='I need to solve the equation `3x + 11 = 14`. Can you help me?'), type='text')], created_at=1699425801, file_ids=[], metadata={}, object='thread.message', role='user', run_id=None, thread_id='thread_HT89XoJP7ZoL4g2s0CWl01Oo')], object='list', first_id='msg_7DjZts4XzdQw4IQvPYg3zJYS', last_id='msg_7DjZts4XzdQw4IQvPYg3zJYS', has_more=False)


if __name__ == '__main__':
    pass
examples/test/test_assistant/test_assistant_chat.py
ADDED
@@ -0,0 +1,73 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://platform.openai.com/docs/assistants/overview
"""
import argparse
import time

from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Write and run code to answer math questions.",
        tools=[{"type": "code_interpreter"}],
        model="gpt-4-1106-preview"
    )
    print(f"assistant.id: {assistant.id}")

    thread = client.beta.threads.create()
    print(f"thread.id: {thread.id}")

    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content="I need to solve the equation `3x + 11 = 14`. Can you help me?"
    )

    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
        instructions="Please address the user as Jane Doe. The user has a premium account."
    )

    run = client.beta.threads.runs.retrieve(
        thread_id=thread.id,
        run_id=run.id
    )

    time.sleep(10)

    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    print(messages)

    return


if __name__ == '__main__':
    main()
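Editor's note: test_assistant_chat.py waits a fixed time.sleep(10) before listing the thread messages, so a slow run can still be unfinished when the messages are read. A minimal sketch of polling the run status instead, assuming the client, thread, and run objects from that script (the status names are the ones used by the Assistants beta API):

import time

# Poll until the run has left the queued/in_progress states, then read the thread.
while True:
    run = client.beta.threads.runs.retrieve(
        thread_id=thread.id,
        run_id=run.id
    )
    if run.status not in ("queued", "in_progress"):
        break
    time.sleep(1)

messages = client.beta.threads.messages.list(thread_id=thread.id)
print(messages)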
examples/test/test_assistant/test_assistant_delete.py
ADDED
@@ -0,0 +1,42 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import json
import time

from openai import OpenAI
from openai.types.beta.assistant_deleted import AssistantDeleted

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    result: AssistantDeleted = client.beta.assistants.delete(assistant_id="asst_bXMHkMvMfJX2J0JgK4nIHKqc")
    print(f"result: {result}")

    print(result.model_dump(mode="json"))

    result = json.dumps(result.model_dump(mode="json"), indent=4, ensure_ascii=False)
    print(result)
    return


if __name__ == '__main__':
    main()
examples/test/test_assistant/test_assistant_file_list.py
ADDED
@@ -0,0 +1,65 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import json
import time

from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage
from openai.types.beta.assistant import Assistant

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    """
    {
        "data": [
            {
                "id": "file-BuLs1LIuKWGRwmx8YDwQ5Vd7",
                "bytes": 1206396,
                "created_at": 1699443275,
                "filename": "三国演义.txt",
                "object": "file",
                "purpose": "assistants",
                "status": "processed",
                "status_details": null
            }
        ],
        "object": "list",
        "has_more": false
    }

    """
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    file_list = client.files.list()
    print(f"file_list: {file_list}")
    print(type(file_list))
    print(file_list.model_dump(mode="python"))
    print(file_list.model_dump(mode="json"))

    file_list = json.dumps(file_list.model_dump(mode="json"), indent=4, ensure_ascii=False)
    print(file_list)

    return


if __name__ == '__main__':
    main()
examples/test/test_assistant/test_assistant_list.py
ADDED
@@ -0,0 +1,87 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import json
import time

from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage
from openai.types.beta.assistant import Assistant

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    """
    {
        "data": [
            {
                "id": "asst_9wwRvH6Jr3LKvKEP0Tm4oeRy",
                "created_at": 1699493281,
                "description": null,
                "file_ids": [],
                "instructions": "You are a personal math tutor. Write and run code to answer math questions.",
                "metadata": {},
                "model": "gpt-4-1106-preview",
                "name": "Math Tutor",
                "object": "assistant",
                "tools": []
            },
            {
                "id": "asst_cjUW4TZY6TEV3xhBBJW6qCaN",
                "created_at": 1699494925,
                "description": null,
                "file_ids": [
                    "file-hy90LKmHi5Y5jKYMr9WdlT91"
                ],
                "instructions": "根据小说内容回答问题。",
                "metadata": {},
                "model": "gpt-4-1106-preview",
                "name": "小说专家",
                "object": "assistant",
                "tools": [
                    {
                        "type": "retrieval"
                    }
                ]
            },
        ],
        "object": "list",
        "first_id": "asst_blsH05YKxgb1YgC3e1xhrQOC",
        "last_id": "asst_gGLXz0SiTopsjA6rq9y5mdS5",
        "has_more": true
    }

    """
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    assistant_list: SyncCursorPage[Assistant] = client.beta.assistants.list()
    print(f"assistant_list: {assistant_list}")
    print(type(assistant_list))
    print(assistant_list.model_dump(mode="python"))
    print(assistant_list.model_dump(mode="json"))

    assistant_list = json.dumps(assistant_list.model_dump(mode="json"), indent=4, ensure_ascii=False)
    print(assistant_list)

    return


if __name__ == '__main__':
    main()
examples/test/test_assistant/test_assistant_retrieval.py
ADDED
@@ -0,0 +1,79 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import time

from openai import OpenAI

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    parser.add_argument(
        "--document_file",
        default=(project_path / "data/三国演义.txt").as_posix(),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    document = client.files.create(
        file=open(args.document_file, "rb"),
        purpose='assistants'
    )

    assistant = client.beta.assistants.create(
        name="小说专家",
        instructions="根据小说内容回答问题。",
        tools=[{"type": "retrieval"}],
        model="gpt-4-1106-preview",
        file_ids=[document.id]
    )
    print(f"assistant.id: {assistant.id}")

    thread = client.beta.threads.create()
    print(f"thread.id: {thread.id}")

    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content="刘备和张飞是什么关系。"
    )

    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
    )

    run = client.beta.threads.runs.retrieve(
        thread_id=thread.id,
        run_id=run.id
    )

    time.sleep(10)

    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    print(messages)

    return


if __name__ == '__main__':
    main()
examples/test/test_assistant/test_asssitant_file_delete.py
ADDED
@@ -0,0 +1,45 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import json
import time

from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage
from openai.types.beta.assistant import Assistant

from project_settings import environment, project_path


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    result = client.files.delete(file_id="file-Rienw0VlV1Ed9QCLZcVmUfgI")
    print(f"result: {result}")

    print(result.model_dump(mode="json"))

    result = json.dumps(result.model_dump(mode="json"), indent=4, ensure_ascii=False)
    print(result)

    return


if __name__ == '__main__':
    main()
examples/test/test_tts/test_tts.py
CHANGED
@@ -27,12 +27,13 @@ def get_args():
     )
     parser.add_argument(
         "--openai_input",
-        default="Today is a wonderful day to build something people love!",
+        # default="Today is a wonderful day to build something people love!",
+        default="오늘은 사람들이 좋아하는 것을 만들기에 좋은 날입니다!",
         type=str
     )
     parser.add_argument(
         "--speech_file_path",
-        default=(Path(__file__).parent / "
+        default=(Path(__file__).parent / "speech_alloy_korean.mp3").as_posix(),
         type=str
     )
     parser.add_argument(
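Editor's note: the test_tts.py hunk only changes two argparse defaults (a Korean input sentence and the speech_alloy_korean.mp3 output path); the synthesis call itself sits outside the hunk. For orientation, a minimal sketch of how such a script typically drives the openai 1.x speech endpoint; the model and voice names below are illustrative assumptions, not taken from this repository:

from pathlib import Path

from openai import OpenAI

client = OpenAI(api_key="sk-...")  # assumption: key passed directly in this sketch

# Synthesize the default input text and write the audio next to this file.
response = client.audio.speech.create(
    model="tts-1",   # assumed model name
    voice="alloy",   # assumed voice, matching the speech_alloy_korean.mp3 filename
    input="오늘은 사람들이 좋아하는 것을 만들기에 좋은 날입니다!",
)
response.stream_to_file(Path(__file__).parent / "speech_alloy_korean.mp3")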
main.py
CHANGED
@@ -5,16 +5,27 @@ https://huggingface.co/spaces/fffiloni/langchain-chat-with-pdf-openai
 """
 import argparse
 import json
+import logging
 import time
 from typing import List, Tuple
 
+logging.basicConfig(
+    level=logging.DEBUG,
+    format="%(asctime)s %(levelname)s %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
 import gradio as gr
+import openai
 from openai import OpenAI
 from threading import Thread
 import _queue
 from queue import Queue
 
 import project_settings as settings
+from project_settings import project_path
+
+logger = logging.getLogger(__name__)
 
 
 def get_args():
@@ -34,19 +45,245 @@ def greet(question: str, history: List[Tuple[str, str]]):
     return result
 
 
+def click_create_assistant(openai_api_key: str,
+                           name: str,
+                           instructions: str,
+                           description: str,
+                           tools: str,
+                           files: List[str],
+                           file_ids: str,
+                           model: str,
+                           ):
+    logger.info("click create assistant, name: {}".format(name))
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+
+    # tools
+    tools = str(tools).strip()
+    if tools is not None and len(tools) != 0:
+        tools = tools.split("\n")
+        tools = [json.loads(tool) for tool in tools if len(tool.strip()) != 0]
+    else:
+        tools = list()
+
+    # files
+    if files is not None and len(files) != 0:
+        files = [
+            client.files.create(
+                file=open(file, "rb"),
+                purpose='assistants'
+            ) for file in files
+        ]
+    else:
+        files = list()
+
+    # file_ids
+    file_ids = str(file_ids).strip()
+    if file_ids is not None and len(file_ids) != 0:
+        file_ids = file_ids.split("\n")
+        file_ids = [file_id.strip() for file_id in file_ids if len(file_id.strip()) != 0]
+    else:
+        file_ids = list()
+
+    # assistant
+    assistant = client.beta.assistants.create(
+        name=name,
+        instructions=instructions,
+        description=description,
+        tools=tools,
+        file_ids=file_ids + [file.id for file in files],
+        model=model,
+    )
+    assistant_id = assistant.id
+
+    # thread
+    thread = client.beta.threads.create()
+    thread_id = thread.id
+
+    return assistant_id, thread_id
+
+
+def click_list_assistant(openai_api_key: str) -> str:
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+    assistant_list = client.beta.assistants.list()
+    assistant_list.model_dump(mode="json")
+
+    result = ""
+    for a in assistant_list["data"]:
+        assis = "id: \n{}\nname: \n{}\ndescription: \n{}\n\n".format(a["id"], a["name"], a["description"])
+        result += assis
+
+    return result
+
+
+def click_delete_assistant(openai_api_key: str,
+                           assistant_id: str) -> str:
+    logger.info("click delete assistant, assistant_id: {}".format(assistant_id))
+
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+
+    try:
+        assistant_deleted = client.beta.assistants.delete(assistant_id=assistant_id)
+        result = "success" if assistant_deleted.deleted else "failed"
+    except openai.NotFoundError as e:
+        result = e.message
+    return result
+
+
+def click_list_file(openai_api_key: str):
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+    file_list = client.files.list()
+    file_list = file_list.model_dump(mode="json")
+
+    result = ""
+    for f in file_list["data"]:
+        file = "id: \n{}\nfilename: \n{}\nbytes: \n{}\nstatus: \n{}\n\n".format(
+            f["id"], f["filename"], f["bytes"], f["status"]
+        )
+        result += file
+
+    return result
+
+
+def click_delete_file(openai_api_key: str,
+                      file_id: str) -> str:
+    logger.info("click delete file, file_id: {}".format(file_id))
+
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+
+    try:
+        assistant_deleted = client.files.delete(file_id=file_id)
+        result = "success" if assistant_deleted.deleted else "failed"
+    except openai.NotFoundError as e:
+        result = e.message
+    return result
+
+
+def click_upload_files(openai_api_key: str,
+                       files: List[str],
+                       ):
+    logger.info("click upload files, files: {}".format(files))
+
+    client = OpenAI(
+        api_key=openai_api_key,
+    )
+
+    result = list()
+    if files is not None and len(files) != 0:
+        files = [
+            client.files.create(
+                file=open(file, "rb"),
+                purpose='assistants'
+            ) for file in files
+        ]
+        file_ids = [file.id for file in files]
+        result.extend(file_ids)
+    return result
+
+
 def get_message_list(client: OpenAI, thread_id: str):
+    """
+    SyncCursorPage[ThreadMessage](
+        data=[
+            ThreadMessage(
+                id='msg_kb0f2fyDC6OwMyXxKbUpcuBS',
+                assistant_id='asst_DzVVZkE0dIGe0gsOdsdn3A0w',
+                content=[
+                    MessageContentText(
+                        text=Text(
+                            annotations=[
+                                TextAnnotationFileCitation(
+                                    end_index=44,
+                                    file_citation=TextAnnotationFileCitationFileCitation(
+                                        file_id='file-IwzwXQkixMu7fvgGoC1alIWu',
+                                        quote='念刘备、关羽、张飞,虽然异姓,既结为兄弟,则同心协力,救困扶危;上报国家,下安黎庶。不求同年同月同日生,只愿同年同月同日死。皇天后土,实鉴此心,背义忘恩,天人共戮!”誓毕,拜玄德为兄,关羽次之,张飞为弟'
+                                    ),
+                                    start_index=34,
+                                    text='【7†source】',
+                                    type='file_citation'
+                                )
+                            ],
+                            value='刘备和张飞虽然是异姓,但他们结为了兄弟,其中刘备被拜为兄,而张飞为弟【7†source】。'
+                        ),
+                        type='text'
+                    )
+                ],
+                created_at=1699493845,
+                file_ids=[],
+                metadata={},
+                object='thread.message',
+                role='assistant',
+                run_id='run_zJYZX0KFEvEh2VG5x5zSLq9s',
+                thread_id='thread_3JWRdjvZDJTBgZ0tlrrKXnrt'
+            ),
+
+            ThreadMessage(
+                id='msg_tc5Tit7q19S5TSgvmBauME3H',
+                assistant_id=None,
+                content=[
+                    MessageContentText(
+                        text=Text(
+                            annotations=[],
+                            value='刘备和张飞是什么关系。'
+                        ),
+                        type='text'
+                    )
+                ],
+                created_at=1699493838,
+                file_ids=[],
+                metadata={},
+                object='thread.message',
+                role='user',
+                run_id=None,
+                thread_id='thread_3JWRdjvZDJTBgZ0tlrrKXnrt'
+            )
+
+        ],
+        object='list',
+        first_id='msg_kb0f2fyDC6OwMyXxKbUpcuBS',
+        last_id='msg_tc5Tit7q19S5TSgvmBauME3H',
+        has_more=False
+    )
+    """
     messages = client.beta.threads.messages.list(
         thread_id=thread_id
     )
+    # print(messages)
 
     result = list()
     for message in messages.data:
 
         content = list()
         for msg in message.content:
+            annotations = list()
+            for annotation in msg.text.annotations:
+                a = {
+                    "start_index": annotation.start_index,
+                    "end_index": annotation.end_index,
+                    "text": annotation.text,
+                    "type": annotation.type,
+                }
+
+                if annotation.type == "file_citation":
+                    a["file_citation"] = {
+                        "file_id": annotation.file_citation.file_id,
+                        "quote": annotation.file_citation.quote,
+                    }
+
+                annotations.append(a)
+
             content.append({
                 "text": {
-                    "annotations": msg.text.annotations,
+                    "annotations": annotations,
                     "value": msg.text.value,
                 },
                 "type": msg.type,
@@ -89,6 +326,43 @@ def convert_message_list_to_response(message_list: List[dict]) -> str:
     return response
 
 
+def convert_message_list_to_conversation(message_list: List[dict]) -> List[Tuple[str, str]]:
+    conversation = list()
+    for message in message_list:
+        role = message["role"]
+        content = message["content"]
+        for c in content:
+            c_type = c["type"]
+            if c_type != "text":
+                continue
+            text: dict = c["text"]
+
+            if c_type == "text":
+                text_value = text["value"]
+                text_annotations = text["annotations"]
+
+                msg = text_value
+                for text_annotation in text_annotations:
+                    a_type = text_annotation["type"]
+                    if a_type == "file_citation":
+                        msg += "\n\n"
+                        msg += "\nquote: \n{}\nfile_id: \n{}".format(
+                            text_annotation["file_citation"]["quote"],
+                            text_annotation["file_citation"]["file_id"],
+                        )
+                    else:
+                        raise NotImplementedError
+
+            if role == "assistant":
+                msg = [None, msg]
+            else:
+                msg = [msg, None]
+
+            conversation.append(msg)
+
+    return conversation
+
+
 def streaming_refresh(openai_api_key: str,
                       thread_id: str,
                       queue: Queue,
@@ -121,8 +395,10 @@ def refresh(openai_api_key: str,
     )
 
     message_list = get_message_list(client, thread_id=thread_id)
+    print(message_list)
+    logger.info("message_list: {}".format(message_list))
+    conversation = convert_message_list_to_conversation(message_list)
+    return conversation
 
 
 def add_and_run(openai_api_key: str,
@@ -130,6 +406,10 @@ def add_and_run(openai_api_key: str,
                 thread_id: str,
                 name: str,
                 instructions: str,
+                description: str,
+                tools: str,
+                files: List[str],
+                file_ids: str,
                 model: str,
                 query: str,
                 ):
@@ -137,13 +417,10 @@ def add_and_run(openai_api_key: str,
         api_key=openai_api_key,
     )
     if assistant_id is None or len(assistant_id.strip()) == 0:
-            instructions
-            # tools=[{"type": "code_interpreter"}],
-            model=model,
+        assistant_id = click_create_assistant(
+            openai_api_key,
+            name, instructions, description, tools, files, file_ids, model
         )
-        assistant_id = assistant.id
 
     if thread_id is None or len(thread_id.strip()) == 0:
         thread = client.beta.threads.create()
@@ -192,7 +469,8 @@ def add_and_run(openai_api_key: str,
 
     result = [
         assistant_id, thread_id,
-        last_response
+        last_response,
+        []
     ]
     yield result
 
@@ -200,13 +478,13 @@ def main():
 def main():
     args = get_args()
 
+    gr_description = """
+    OpenAI Assistant
     """
 
     # ui
    with gr.Blocks() as blocks:
-        gr.Markdown(value=
+        gr.Markdown(value=gr_description)
 
         with gr.Row():
             # settings
@@ -221,22 +499,41 @@ def main():
 
                 name = gr.Textbox(label="name")
                 instructions = gr.Textbox(label="instructions")
+                description = gr.Textbox(label="description")
 
                 model = gr.Dropdown(["gpt-4-1106-preview"], value="gpt-4-1106-preview", label="model")
 
                 # functions
+                tools = gr.TextArea(label="functions")
 
                 # upload files
-                retrieval_files = gr.Files(label="
+                retrieval_files = gr.Files(label="retrieval_files")
+                retrieval_file_ids = gr.TextArea(label="retrieval_file_ids")
+
+                # create assistant
+                create_assistant_button = gr.Button("create assistant")
+
+            with gr.TabItem("list assistant"):
+                list_assistant_button = gr.Button("list assistant")
+                assistant_list = gr.TextArea(label="assistant_list")
+
+                delete_assistant_id = gr.Textbox(label="delete_assistant_id")
+                delete_assistant_button = gr.Button("delete assistant")
+
+            with gr.TabItem("list file"):
+                list_file_button = gr.Button("list file")
+                file_list = gr.TextArea(label="file_list")
+
+                delete_file_id = gr.Textbox(label="delete_file_id")
+                delete_file_button = gr.Button("delete file")
+
+                upload_files = gr.Files(label="upload_files")
+                upload_files_button = gr.Button("upload file")
 
             # chat
             with gr.Column(scale=5):
-                query = gr.Textbox(lines=
-                # chat_bot = gr.Chatbot([], elem_id="context", height=400)
-                # text_box = gr.Textbox(show_label=False, placeholder="Enter text and press enter", container=False)
+                chat_bot = gr.Chatbot(label="conversation", height=600)
+                query = gr.Textbox(lines=1, label="query")
 
             with gr.Row():
                 with gr.Column(scale=1):
@@ -246,7 +543,6 @@ def main():
 
             # states
             with gr.Column(scale=2):
-                # upload files
                 assistant_id = gr.Textbox(value=None, label="assistant_id")
                 thread_id = gr.Textbox(value=None, label="thread_id")
 
@@ -257,34 +553,117 @@ def main():
                 [
                     "Math Tutor",
                     "You are a personal math tutor. Write and run code to answer math questions.",
+                    "Official math test cases",
+                    None,
+                    None,
                     "gpt-4-1106-preview",
                     "123 * 524 等于多少?"
-                ]
+                ],
+                [
+                    "小说专家",
+                    "根据小说内容回答问题。",
+                    "三国演义文档问答测试",
+                    "{\"type\": \"retrieval\"}",
+                    [
+                        (project_path / "data/三国演义.txt").as_posix()
+                    ],
+                    "gpt-4-1106-preview",
+                    "刘备和张飞是什么关系。"
+                ],
             ],
             inputs=[
-                name, instructions, model,
+                name, instructions, description, tools, retrieval_files, model,
                 query,
             ],
            examples_per_page=5
        )
 
+        # create assistant
+        create_assistant_button.click(
+            click_create_assistant,
+            inputs=[
+                openai_api_key,
+                name, instructions, description, tools, retrieval_files, retrieval_file_ids, model,
+            ],
+            outputs=[
+                assistant_id, thread_id
+            ]
+        )
+
+        # list assistant
+        list_assistant_button.click(
+            click_list_assistant,
+            inputs=[
+                openai_api_key
+            ],
+            outputs=[
+                assistant_list
+            ]
+        )
+
+        # delete assistant button
+        delete_assistant_button.click(
+            click_delete_assistant,
+            inputs=[
+                openai_api_key,
+                delete_assistant_id
+            ],
+            outputs=[
+                delete_assistant_id
+            ]
+        )
+
+        # list file
+        list_file_button.click(
+            click_list_file,
+            inputs=[
+                openai_api_key
+            ],
+            outputs=[
+                file_list
+            ],
+        )
+
+        # delete file
+        delete_file_button.click(
+            click_delete_file,
+            inputs=[
+                openai_api_key,
+                delete_file_id
+            ],
+            outputs=[
+                delete_file_id
+            ]
+        )
+
+        # upload files
+        upload_files_button.click(
+            click_upload_files,
+            inputs=[
+                openai_api_key,
+                upload_files
+            ],
+            outputs=[
+
+            ]
+        )
+
         # add and run
         add_and_run_button.click(
             add_and_run,
             inputs=[
                 openai_api_key,
                 assistant_id, thread_id,
-                name, instructions, model,
+                name, instructions, description, tools, retrieval_files, retrieval_file_ids, model,
                 query,
             ],
             outputs=[
                 assistant_id, thread_id,
+                chat_bot
             ],
         )
 
         # refresh
         refresh_button.click(
             refresh,
             inputs=[
@@ -292,7 +671,7 @@ def main():
                 thread_id,
             ],
             outputs=[
+                chat_bot
             ]
         )
 
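Editor's note on the new UI wiring: click_create_assistant splits the "functions" text area on newlines and runs json.loads on each non-empty line, so each tool definition must be one JSON object per line, for example:

{"type": "retrieval"}
{"type": "code_interpreter"}

The "retrieval_file_ids" text area is parsed the same way, one file id per line, and the parsed ids are concatenated with the ids of any freshly uploaded retrieval_files before the assistant is created.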