fix append_oai_message (#47)

* fix append_oai_message

* add testcase for groupchat

* add test_oai to openai workflow

* code format

* update

* format

* update
Yiran Wu 2023-10-02 14:52:38 -04:00 committed by GitHub
parent 0481483faa
commit 4d85f4db0a
3 changed files with 74 additions and 6 deletions


@@ -69,6 +69,7 @@ jobs:
           OAI_CONFIG_LIST: ${{ secrets.OAI_CONFIG_LIST }}
         run: |
           pip install nbconvert nbformat ipykernel
+          coverage run -a -m pytest test/test_with_openai.py
           coverage run -a -m pytest test/test_notebook.py
           coverage xml
           cat "$(pwd)/test/executed_openai_notebook_output.txt"


@@ -259,6 +259,7 @@ class ConversableAgent(Agent):
         If the message received is a string, it will be put in the "content" field of the new dictionary.
         If the message received is a dictionary but does not have any of the two fields "content" or "function_call",
         this message is not a valid ChatCompletion message.
+        If only "function_call" is provided, "content" will be set to None if not provided, and the role of the message will be forced to "assistant".

         Args:
             message (dict or str): message to be appended to the ChatCompletion conversation.
@@ -271,10 +272,15 @@ class ConversableAgent(Agent):
         message = self._message_to_dict(message)
         # create oai message to be appended to the oai conversation that can be passed to oai directly.
         oai_message = {k: message[k] for k in ("content", "function_call", "name", "context") if k in message}
-        if "content" not in oai_message and "function_call" not in oai_message:
-            return False
+        if "content" not in oai_message:
+            if "function_call" in oai_message:
+                oai_message["content"] = None  # if only function_call is provided, content will be set to None.
+            else:
+                return False
         oai_message["role"] = "function" if message.get("role") == "function" else role
+        if "function_call" in oai_message:
+            oai_message["role"] = "assistant"  # only messages with role 'assistant' can have a function call.
         self._oai_messages[conversation_id].append(oai_message)
         return True
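
For context, a minimal standalone sketch of the normalization this hunk introduces: a message carrying only a "function_call" is now accepted, its "content" is filled in as None, and its role is forced to "assistant". The message values and the local `role` variable below are made up for illustration (the real method receives `role` as an argument and appends the result to the per-conversation history).

    # Illustrative sketch only; not the actual append_oai_message implementation.
    message = {"function_call": {"name": "get_random_number", "arguments": "{}"}}

    oai_message = {k: message[k] for k in ("content", "function_call", "name", "context") if k in message}
    if "content" not in oai_message:
        if "function_call" in oai_message:
            oai_message["content"] = None  # function_call-only messages are now valid
        else:
            oai_message = None  # stands in for `return False`: nothing to append

    if oai_message is not None:
        role = "user"  # stand-in for the method's `role` argument
        oai_message["role"] = "function" if message.get("role") == "function" else role
        if "function_call" in oai_message:
            oai_message["role"] = "assistant"  # only 'assistant' messages may carry a function call

    print(oai_message)
    # {'function_call': {'name': 'get_random_number', 'arguments': '{}'}, 'content': None, 'role': 'assistant'}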
@@ -289,8 +295,8 @@ class ConversableAgent(Agent):

         Args:
             message (dict or str): message to be sent.
-                The message could contain the following fields (either content or function_call must be provided):
-                    - content (str): the content of the message.
+                The message could contain the following fields:
+                    - content (str): Required, the content of the message. (Can be None)
                     - function_call (str): the name of the function to be called.
                     - name (str): the name of the function to be called.
                     - role (str): the role of the message, any role that is not "function"
@@ -338,8 +344,8 @@ class ConversableAgent(Agent):

         Args:
             message (dict or str): message to be sent.
-                The message could contain the following fields (either content or function_call must be provided):
-                    - content (str): the content of the message.
+                The message could contain the following fields:
+                    - content (str): Required, the content of the message. (Can be None)
                     - function_call (str): the name of the function to be called.
                     - name (str): the name of the function to be called.
                     - role (str): the role of the message, any role that is not "function"
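
To illustrate the updated field contract, here are two hypothetical message dicts. The docstring types "function_call" as the function name; in OpenAI's ChatCompletion format it is a dict with "name" and "arguments", which is the shape used in the sketch below. The values are invented for the example.

    # Valid after this change: "content" is required but may be None when a
    # function_call is supplied.
    function_call_message = {
        "content": None,
        "function_call": {"name": "get_random_number", "arguments": "{}"},
    }

    # A plain text message still only needs "content".
    text_message = {"content": "Roll again until you have one even and one odd number."}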

test/test_with_openai.py (new file)

@@ -0,0 +1,61 @@
import autogen
import pytest
import sys

try:
    import openai

    skip = False
except ImportError:
    skip = True


@pytest.mark.skipif(
    skip or not sys.version.startswith("3.10"),
    reason="do not run if openai is not installed or py!=3.10",
)
def test_function_call_groupchat():
    import random

    def get_random_number():
        return random.randint(0, 100)

    config_list_gpt4 = autogen.config_list_from_json(
        "OAI_CONFIG_LIST",
        filter_dict={
            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
        },
    )
    llm_config = {
        "config_list": config_list_gpt4,
        "seed": 42,
        "functions": [
            {
                "name": "get_random_number",
                "description": "Get a random number between 0 and 100",
                "parameters": {
                    "type": "object",
                    "properties": {},
                },
            },
        ],
    }
    user_proxy = autogen.UserProxyAgent(
        name="User_proxy",
        system_message="A human admin that will execute function_calls.",
        function_map={"get_random_number": get_random_number},
        human_input_mode="NEVER",
    )
    coder = autogen.AssistantAgent(
        name="Player",
        system_message="You can call the function `get_random_number` to get a random number. Stop only when you get at least 1 even number and 1 odd number. Reply TERMINATE to stop.",
        llm_config=llm_config,
    )
    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=7)
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
    user_proxy.initiate_chat(manager, message="Let's start the game!")


if __name__ == "__main__":
    test_function_call_groupchat()
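
For reference, the new test can also be launched through pytest's Python entry point instead of the CI command above (a small sketch; it assumes the repository root as the working directory, and the skipif guard still skips the test when openai is missing or the Python version is not 3.10):

    import pytest

    # Equivalent to `pytest test/test_with_openai.py`, without coverage collection.
    pytest.main(["test/test_with_openai.py"])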