# function_call.py
  1. """
  2. Inspired by the OpenAI example found here:
  3. https://platform.openai.com/docs/guides/function-calling/parallel-function-calling
  4. """
  5. import datetime
  6. from openai import OpenAI
  7. import json
# Client pointed at a local OpenAI-compatible API server; local servers
# typically ignore the API key, but the SDK requires a non-empty value.
client = OpenAI(api_key="EMPTY", base_url="http://localhost:2242/v1")
# Use the first model the server advertises.
# NOTE(review): assumes the server serves at least one model — confirm.
models = client.models.list()
model = models.data[0].id
# Whether the first chat-completion request below is streamed.
stream = True
  12. def get_current_date_utc():
  13. print("Calling get_current_date_utc client side.")
  14. return datetime.datetime.now(datetime.timezone.utc).strftime(
  15. "The current UTC datetime is (day: %A, date (day/month/year): %d/%m/%Y, time: %H:%M)."
  16. )
  17. # Example dummy function hard coded to return the same weather
  18. # In production, this could be your backend API or an external API
  19. def get_current_weather(location, unit="fahrenheit"):
  20. """Get the current weather in a given location"""
  21. print("Calling get_current_weather client side.")
  22. if "tokyo" in location.lower():
  23. return json.dumps({
  24. "location": "Tokyo",
  25. "temperature": "10",
  26. "unit": unit
  27. })
  28. elif "san francisco" in location.lower():
  29. return json.dumps({
  30. "location": "San Francisco",
  31. "temperature": "72",
  32. "unit": unit
  33. })
  34. elif "paris" in location.lower():
  35. return json.dumps({
  36. "location": "Paris",
  37. "temperature": "22",
  38. "unit": unit
  39. })
  40. else:
  41. return json.dumps({"location": location, "temperature": "unknown"})
def run_conversation():
    """Run a two-step tool-calling round trip against the local server.

    Sends a user question plus two tool schemas, executes any tool calls
    the model requests client-side, appends the results to the
    conversation, and returns the model's follow-up response.  Returns
    None when a non-streamed response has no choices (and implicitly when
    the model requested no tool calls).
    """
    # Step 1: send the conversation and available functions to the model
    # messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]
    messages = [{
        "role":
        "user",
        "content":
        "What's the weather like in San Francisco, Tokyo, and Paris ? We also need to know the current date."
    }]
    # JSON-schema descriptions of the client-side tools offered to the model.
    tools = [{
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type":
                        "string",
                        "description":
                        "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"]
                    },
                },
                "required": ["location"],
            },
        },
    }, {
        "type": "function",
        "function": {
            # Takes no parameters, so no "parameters" schema is given.
            "name": "get_current_date_utc",
            "description": "Get the current UTC time",
        },
    }]
    response = client.chat.completions.create(
        model=model,
        messages=messages,
        tools=tools,
        stream=stream,
        tool_choice="auto",  # auto is default, but we'll be explicit
    )
    response_message = ""
    tool_calls = None
    if stream:
        # Accumulate streamed content deltas until a finish_reason arrives;
        # a "tool_calls" finish carries the tool-call list in the last delta.
        text_message = ""
        for chunk in response:
            if chunk.choices[0].finish_reason is not None:
                if chunk.choices[0].finish_reason == "tool_calls":
                    tool_calls = chunk.choices[0].delta.tool_calls
                break
            if chunk.choices[0].delta.content is not None:
                text_message += chunk.choices[0].delta.content
                response_message = {"role": "assistant", "content": text_message}
        # NOTE(review): if the stream carries no content deltas at all,
        # response_message remains "" and that bare string is what gets
        # appended to messages below — confirm this is intended.
    else:
        if not len(response.choices):
            return None
        response_message = response.choices[0].message
        # print(str(response_message))
        tool_calls = response_message.tool_calls
    # Step 2: check if the model wanted to call a function
    if tool_calls:
        # Step 3: call the function
        # Note: the JSON response may not always be valid; be sure to handle errors
        available_functions = {
            "get_current_weather": get_current_weather,
            "get_current_date_utc": get_current_date_utc,
        }
        messages.append(
            response_message)  # extend conversation with assistant's reply
        # Step 4: send the info for each function call and function response to the model
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            if function_name == "get_current_weather":
                # Arguments arrive as a JSON string chosen by the model.
                function_args = json.loads(tool_call.function.arguments)
                function_response = function_to_call(
                    location=function_args.get("location"),
                    unit=function_args.get("unit"),
                )
            else:
                # get_current_date_utc takes no arguments.
                function_response = function_to_call()
            messages.append({
                "tool_call_id": tool_call.id,
                "role": "tool",
                "name": function_name,
                "content": function_response,
            })  # extend conversation with function response
        second_response = client.chat.completions.create(
            model=model,
            messages=messages,
        )  # get a new response from the model where it can see the function response
        for it_msg, msg in enumerate(messages):
            print("Message %i:\n %s\n" % (it_msg, str(msg)))
        return second_response
  140. result = run_conversation()
  141. print("Final response:\n%s" % result)