#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://platform.openai.com/docs/guides/function-calling
"""
import argparse
import json

from openai import OpenAI

from project_settings import environment


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


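# Stand-in "tool": returns hard-coded weather data as a JSON string so the
# model's tool calls can be answered without a real weather API.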
def get_current_weather(location, unit="fahrenheit"):
    if "tokyo" in location.lower():
        return json.dumps({"location": location, "temperature": "10", "unit": "celsius"})
    elif "san francisco" in location.lower():
        return json.dumps({"location": location, "temperature": "72", "unit": "fahrenheit"})
    else:
        return json.dumps({"location": location, "temperature": "22", "unit": "celsius"})


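# Maps the function names advertised to the model onto the local callables
# that actually implement them.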
available_functions = {
    "get_current_weather": get_current_weather,
}


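# Flow: send the user question together with the tool schema, execute any tool
# calls the model requests, then call the model again with the tool results
# appended so it can compose the final answer.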
def main():
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

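    # A single user question mentioning three cities; the model may answer with
    # several tool calls, one per city.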
    messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]

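    # First request: given the tool schema, the model can reply with plain text
    # or with structured tool calls.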
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        messages=messages,
        tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
    )
    print(response.choices)

    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls
    print(tool_calls)

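    # If the model requested tool calls, run each one locally and append its
    # result to the conversation as a "tool" message.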
    if tool_calls:
        messages.append(response_message)
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
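            # The tool output is tied back to the originating call via tool_call_id.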
            messages.append(
                {
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": function_response,
                }
            )

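        # Second request: with the tool results in context, the model can now
        # produce the final natural-language answer.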
        second_response = client.chat.completions.create(
            model="gpt-3.5-turbo-1106",
            messages=messages,
        )
        print("second_response: {}".format(second_response))
    return


if __name__ == '__main__':
main()