mirror of https://github.com/microsoft/autogen.git
parent 59daf78d9f
commit fbe8476726
@@ -57,11 +57,11 @@ jobs:
       - name: Test with pytest skipping openai tests
         if: matrix.python-version != '3.10' && matrix.os == 'ubuntu-latest'
         run: |
-          pytest test --skip-openai --durations=10 --durations-min=1.0
+          pytest test --ignore=test/agentchat/contrib --skip-openai --durations=10 --durations-min=1.0
       - name: Test with pytest skipping openai and docker tests
         if: matrix.python-version != '3.10' && matrix.os != 'ubuntu-latest'
         run: |
-          pytest test --skip-openai --skip-docker --durations=10 --durations-min=1.0
+          pytest test --ignore=test/agentchat/contrib --skip-openai --skip-docker --durations=10 --durations-min=1.0
       - name: Coverage
         if: matrix.python-version == '3.10'
         run: |
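Note: the `--skip-openai` and `--skip-docker` options passed above are custom pytest flags, so they have to be registered somewhere in a conftest.py. The conftest fragment at the bottom of this diff only shows the resulting module-level variables; the sketch below is an assumption about how such flags are typically wired with pytest_addoption, not the repository's exact conftest code.

# Hypothetical conftest.py wiring for the custom skip flags (illustrative sketch).
import pytest

skip_openai = False
skip_docker = False
reason = "requested to skip"


def pytest_addoption(parser):
    # Register the CLI flags so pytest accepts them.
    parser.addoption("--skip-openai", action="store_true", help="skip tests that call the OpenAI API")
    parser.addoption("--skip-docker", action="store_true", help="skip tests that need a Docker daemon")


def pytest_configure(config):
    # Expose the flag values as module-level booleans that test modules can import.
    global skip_openai, skip_docker
    skip_openai = config.getoption("--skip-openai", False)
    skip_docker = config.getoption("--skip-docker", False)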
@@ -203,9 +203,6 @@ output after executing the code) and provide a corrected answer or code.
         Returns:
             agent: a set-up agent.
         """
-        from huggingface_hub import HfApi
-        from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError
-
         config_list = autogen.config_list_from_json(
             self.config_file_or_env,
             file_location=self.config_file_location,

@@ -218,10 +215,15 @@ output after executing the code) and provide a corrected answer or code.
                 f"If you load configs from json, make sure the model in agent_configs is in the {self.config_file_or_env}."
             )
         try:
+            from huggingface_hub import HfApi
+            from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError
+
             hf_api = HfApi()
             hf_api.model_info(model_name_or_hf_repo)
             model_name = model_name_or_hf_repo.split("/")[-1]
             server_id = f"{model_name}_{self.host}"
+        except ImportError:
+            server_id = self.online_server_name
         except GatedRepoError as e:
             raise e
         except RepositoryNotFoundError:

@@ -495,9 +497,6 @@ output after executing the code) and provide a corrected answer or code.
             agent_list: a list of agents.
             cached_configs: cached configs.
         """
-        import chromadb
-        from chromadb.utils import embedding_functions
-
         if code_execution_config is None:
             code_execution_config = {
                 "last_n_messages": 2,

@@ -528,6 +527,9 @@ output after executing the code) and provide a corrected answer or code.

         print("==> Looking for suitable agents in library...")
         if embedding_model is not None:
+            import chromadb
+            from chromadb.utils import embedding_functions
+
             chroma_client = chromadb.Client()
             collection = chroma_client.create_collection(
                 name="agent_list",
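The agent-builder hunks above all apply the same treatment: third-party imports (huggingface_hub, chromadb) move from module or method scope into the narrow code path that needs them, and a missing package becomes an ImportError fallback instead of a hard failure. A minimal sketch of that pattern follows, reusing the names from the second hunk; the helper function name itself is made up for illustration.

# Sketch of the optional-dependency fallback shown in the diff above.
def resolve_server_id(model_name_or_hf_repo: str, host: str, online_server_name: str) -> str:
    try:
        from huggingface_hub import HfApi  # imported lazily: only this branch needs it

        HfApi().model_info(model_name_or_hf_repo)  # raises for unknown or gated repos
        model_name = model_name_or_hf_repo.split("/")[-1]
        return f"{model_name}_{host}"
    except ImportError:
        # huggingface_hub is not installed: fall back to the online server name.
        return online_server_name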
@@ -1 +1 @@
-__version__ = "0.2.23"
+__version__ = "0.2.24"
@@ -10,9 +10,17 @@ from autogen.agentchat.contrib.agent_builder import AgentBuilder

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-from conftest import skip_openai as skip  # noqa: E402
+sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
+from conftest import reason, skip_openai  # noqa: E402
 from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST  # noqa: E402

+try:
+    import chromadb
+    import huggingface_hub
+except ImportError:
+    skip = True
+else:
+    skip = False
+
 here = os.path.abspath(os.path.dirname(__file__))


@@ -30,8 +38,8 @@ def _config_check(config):


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_build():
     builder = AgentBuilder(

@@ -59,8 +67,8 @@ def test_build():


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai or skip,
+    reason=reason + "OR dependency not installed",
 )
 def test_build_from_library():
     builder = AgentBuilder(

@@ -109,8 +117,8 @@ def test_build_from_library():


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_save():
     builder = AgentBuilder(

@@ -143,8 +151,8 @@ def test_save():


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_load():
     builder = AgentBuilder(

@@ -169,8 +177,8 @@ def test_load():


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_clear_agent():
     builder = AgentBuilder(
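The test_agent_builder hunks converge on one guard pattern: the shared `skip_openai` flag and `reason` string come from the top-level conftest, while the optional packages (chromadb, huggingface_hub) are probed with a local try/except that sets `skip`. A condensed, self-contained version of that pattern is sketched below; the test bodies and the hard-coded flag values are placeholders, since the real values are imported from conftest.

# Condensed version of the skip guards used in the hunks above (illustrative only).
import pytest

skip_openai = False  # in the real suite this comes from test/conftest.py
reason = "requested to skip"

try:
    import chromadb  # noqa: F401
    import huggingface_hub  # noqa: F401
except ImportError:
    skip = True
else:
    skip = False


@pytest.mark.skipif(skip_openai, reason=reason)
def test_needs_openai_only():
    assert True  # placeholder body


@pytest.mark.skipif(skip_openai or skip, reason=reason + "OR dependency not installed")
def test_needs_openai_and_optional_deps():
    assert True  # placeholder body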
@@ -1,19 +1,22 @@
 import os
 import sys

 import pytest
-from conftest import skip_openai as skip
-from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST

 import autogen
-from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
+from autogen import AssistantAgent, UserProxyAgent
 from autogen.agentchat.contrib.agent_optimizer import AgentOptimizer

+sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
+from conftest import reason, skip_openai
+from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST
+
 here = os.path.abspath(os.path.dirname(__file__))


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_record_conversation():
     problem = "Simplify $\\sqrt[3]{1+8} \\cdot \\sqrt[3]{1+\\sqrt[3]{8}}"

@@ -54,8 +57,8 @@ def test_record_conversation():


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_step():
     problem = "Simplify $\\sqrt[3]{1+8} \\cdot \\sqrt[3]{1+\\sqrt[3]{8}}"
@@ -14,12 +14,12 @@ from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent
 from autogen.oai.openai_utils import retrieve_assistants_by_name

 sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
-from conftest import skip_openai as skip  # noqa: E402
+from conftest import reason, skip_openai  # noqa: E402

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
 from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST  # noqa: E402

-if not skip:
+if not skip_openai:
     openai_config_list = autogen.config_list_from_json(
         OAI_CONFIG_LIST,
         file_location=KEY_LOC,

@@ -45,8 +45,8 @@ if not skip:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_config_list() -> None:
     assert len(openai_config_list) > 0

@@ -54,8 +54,8 @@ def test_config_list() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_gpt_assistant_chat() -> None:
     for gpt_config in [openai_config_list, aoai_config_list]:

@@ -128,8 +128,8 @@ def _test_gpt_assistant_chat(gpt_config) -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_get_assistant_instructions() -> None:
     for gpt_config in [openai_config_list, aoai_config_list]:

@@ -157,8 +157,8 @@ def _test_get_assistant_instructions(gpt_config) -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_gpt_assistant_instructions_overwrite() -> None:
     for gpt_config in [openai_config_list, aoai_config_list]:

@@ -211,8 +211,8 @@ def _test_gpt_assistant_instructions_overwrite(gpt_config) -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_gpt_assistant_existing_no_instructions() -> None:
     """

@@ -251,8 +251,8 @@ def test_gpt_assistant_existing_no_instructions() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_get_assistant_files() -> None:
     """

@@ -288,8 +288,8 @@ def test_get_assistant_files() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_assistant_retrieval() -> None:
     """

@@ -365,8 +365,8 @@ def test_assistant_retrieval() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_assistant_mismatch_retrieval() -> None:
     """Test function to check if the GPTAssistantAgent can filter out the mismatch assistant"""

@@ -487,8 +487,8 @@ def test_assistant_mismatch_retrieval() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_gpt_assistant_tools_overwrite() -> None:
     """

@@ -609,8 +609,8 @@ def test_gpt_assistant_tools_overwrite() -> None:


 @pytest.mark.skipif(
-    skip,
-    reason="requested to skip",
+    skip_openai,
+    reason=reason,
 )
 def test_gpt_reflection_with_llm() -> None:
     gpt_assistant = GPTAssistantAgent(
@@ -10,7 +10,7 @@ from autogen import UserProxyAgent, config_list_from_json
 from autogen.oai.openai_utils import filter_config

 sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
-from conftest import MOCK_OPEN_AI_API_KEY, skip_openai  # noqa: E402
+from conftest import MOCK_OPEN_AI_API_KEY, reason, skip_openai  # noqa: E402

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
 from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST  # noqa: E402

@@ -26,13 +26,6 @@ except ImportError:
 else:
     skip_all = False

-try:
-    from openai import OpenAI
-except ImportError:
-    skip_oai = True
-else:
-    skip_oai = False or skip_openai
-
 try:
     BING_API_KEY = os.environ["BING_API_KEY"]
 except KeyError:

@@ -40,7 +33,7 @@ except KeyError:
 else:
     skip_bing = False

-if not skip_oai:
+if not skip_openai:
     config_list = config_list_from_json(env_or_file=OAI_CONFIG_LIST, file_location=KEY_LOC)


@@ -104,8 +97,8 @@ def test_web_surfer() -> None:


 @pytest.mark.skipif(
-    skip_oai,
-    reason="do not run if oai is not installed",
+    skip_all or skip_openai,
+    reason="dependency is not installed OR" + reason,
 )
 def test_web_surfer_oai() -> None:
     llm_config = {"config_list": config_list, "timeout": 180, "cache_seed": 42}
@@ -1,15 +1,19 @@
 import json
 import os
 import sqlite3
 import sys
 import uuid

 import pytest
-from conftest import skip_openai
-from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST

 import autogen
 import autogen.runtime_logging

+sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
+sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
+from conftest import skip_openai  # noqa: E402
+from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST  # noqa: E402
+
 TEACHER_MESSAGE = """
 You are roleplaying a math teacher, and your job is to help your students with linear algebra.
 Keep your explanations short.
@@ -1,7 +1,7 @@
 import os
+import sys

 import pytest
-from conftest import skip_openai

 from autogen import UserProxyAgent
 from autogen.code_utils import (

@@ -9,19 +9,15 @@ from autogen.code_utils import (
     is_docker_running,
 )

-try:
-    import openai
-except ImportError:
-    skip = True
-else:
-    skip = False or skip_openai
+sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
+from conftest import reason, skip_openai


 def docker_running():
     return is_docker_running() or in_docker_container()


-@pytest.mark.skipif(skip, reason="openai not installed")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_agent_setup_with_code_execution_off():
     user_proxy = UserProxyAgent(
         name="test_agent",

@@ -32,7 +28,7 @@ def test_agent_setup_with_code_execution_off():
     assert user_proxy._code_execution_config is False


-@pytest.mark.skipif(skip, reason="openai not installed")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_agent_setup_with_use_docker_false():
     user_proxy = UserProxyAgent(
         name="test_agent",

@@ -43,7 +39,7 @@ def test_agent_setup_with_use_docker_false():
     assert user_proxy._code_execution_config["use_docker"] is False


-@pytest.mark.skipif(skip, reason="openai not installed")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_agent_setup_with_env_variable_false_and_docker_running(monkeypatch):
     monkeypatch.setenv("AUTOGEN_USE_DOCKER", "False")


@@ -55,7 +51,7 @@ def test_agent_setup_with_env_variable_false_and_docker_running(monkeypatch):
     assert user_proxy._code_execution_config["use_docker"] is False


-@pytest.mark.skipif(skip or (not docker_running()), reason="openai not installed OR docker not running")
+@pytest.mark.skipif(skip_openai or (not docker_running()), reason=reason + " OR docker not running")
 def test_agent_setup_with_default_and_docker_running(monkeypatch):
     monkeypatch.delenv("AUTOGEN_USE_DOCKER", raising=False)


@@ -71,7 +67,7 @@ def test_agent_setup_with_default_and_docker_running(monkeypatch):
     assert user_proxy._code_execution_config["use_docker"] is True


-@pytest.mark.skipif(skip or (docker_running()), reason="openai not installed OR docker running")
+@pytest.mark.skipif(skip_openai or (docker_running()), reason=reason + " OR docker running")
 def test_raises_error_agent_setup_with_default_and_docker_not_running(monkeypatch):
     monkeypatch.delenv("AUTOGEN_USE_DOCKER", raising=False)
     with pytest.raises(RuntimeError):

@@ -81,7 +77,7 @@ def test_raises_error_agent_setup_with_default_and_docker_not_running(monkeypatch):
     )


-@pytest.mark.skipif(skip or (docker_running()), reason="openai not installed OR docker running")
+@pytest.mark.skipif(skip_openai or (docker_running()), reason=" OR docker running")
 def test_raises_error_agent_setup_with_env_variable_true_and_docker_not_running(monkeypatch):
     monkeypatch.setenv("AUTOGEN_USE_DOCKER", "True")


@@ -92,7 +88,7 @@ def test_raises_error_agent_setup_with_env_variable_true_and_docker_not_running(monkeypatch):
     )


-@pytest.mark.skipif(skip or (not docker_running()), reason="openai not installed OR docker not running")
+@pytest.mark.skipif(skip_openai or (not docker_running()), reason=" OR docker not running")
 def test_agent_setup_with_env_variable_true_and_docker_running(monkeypatch):
     monkeypatch.setenv("AUTOGEN_USE_DOCKER", "True")

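Read together, the docker-related tests above pin down a precedence order for code execution: an explicit `use_docker: False` (or `AUTOGEN_USE_DOCKER=False`) disables Docker, anything else expects a reachable Docker daemon and raises `RuntimeError` when none is available. The sketch below reconstructs that expected behaviour for illustration only; it is not autogen's implementation, and `resolve_use_docker` is a made-up name.

# Rough reconstruction of the behaviour the tests above assert (illustrative only).
import os

from autogen.code_utils import in_docker_container, is_docker_running


def docker_running() -> bool:
    # Same helper the tests define above.
    return is_docker_running() or in_docker_container()


def resolve_use_docker(code_execution_config: dict) -> bool:
    use_docker = code_execution_config.get("use_docker")
    if use_docker is None:
        # No explicit setting: fall back to AUTOGEN_USE_DOCKER, defaulting to "use Docker".
        env = os.environ.get("AUTOGEN_USE_DOCKER", "True")
        use_docker = env.strip().lower() not in ("false", "0", "no")
    if use_docker and not docker_running():
        # Mirrors test_raises_error_agent_setup_with_default_and_docker_not_running.
        raise RuntimeError("Code execution is set to use Docker, but no Docker daemon is available.")
    return bool(use_docker)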
@@ -12,10 +12,10 @@ import autogen
 from autogen import AssistantAgent, UserProxyAgent, gather_usage_summary

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-from conftest import skip_openai as skip  # noqa: E402
+from conftest import reason, skip_openai  # noqa: E402


-@pytest.mark.skipif(skip, reason="openai not installed OR requested to skip")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_gathering():
     config_list = autogen.config_list_from_json(
         OAI_CONFIG_LIST,

@@ -80,7 +80,7 @@ def test_gathering():
     print("Total usage summary:", total_usage_summary)


-@pytest.mark.skipif(skip, reason="openai not installed OR requested to skip")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_agent_usage():
     config_list = autogen.config_list_from_json(
         OAI_CONFIG_LIST,
@@ -9,13 +9,8 @@ from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST

 import autogen

-try:
-    from openai import OpenAI
-except ImportError:
-    skip = True
-else:
-    sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-    from conftest import skip_openai as skip
+sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
+from conftest import reason, skip_openai

 func_def = {
     "name": "get_random_number",

@@ -28,8 +23,8 @@ func_def = {


 @pytest.mark.skipif(
-    skip,
-    reason="do not run if openai is not installed or requested to skip",
+    skip_openai,
+    reason=reason,
 )
 @pytest.mark.parametrize(
     "key, value, sync",
@@ -10,10 +10,10 @@ from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST
 import autogen

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-from conftest import skip_openai as skip  # noqa: E402
+from conftest import reason, skip_openai  # noqa: E402


-@pytest.mark.skipif(skip, reason="requested to skip")
+@pytest.mark.skipif(skip_openai, reason=reason)
 def test_get_human_input():
     config_list = autogen.config_list_from_json(OAI_CONFIG_LIST, KEY_LOC)

|
@ -8,12 +8,14 @@ import pytest
|
|||
import autogen
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
|
||||
from conftest import skip_openai # noqa: E402
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
from conftest import reason, skip_openai # noqa: E402
|
||||
from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST # noqa: E402
|
||||
|
||||
|
||||
@pytest.mark.skipif(skip_openai, reason="requested to skip openai tests")
|
||||
@pytest.mark.skipif(skip_openai, reason=reason)
|
||||
def test_nested():
|
||||
config_list = autogen.config_list_from_json(env_or_file="OAI_CONFIG_LIST")
|
||||
config_list = autogen.config_list_from_json(env_or_file=OAI_CONFIG_LIST, file_location=KEY_LOC)
|
||||
llm_config = {"config_list": config_list}
|
||||
|
||||
tasks = [
|
||||
|
|
|
@@ -3,7 +3,7 @@ import pytest
 skip_openai = False
 skip_redis = False
 skip_docker = False
-
+reason = "requested to skip"
 MOCK_OPEN_AI_API_KEY = "sk-mockopenaiAPIkeyinexpectedformatfortestingonly"
