add doc about effects for capabilities (#1842)

* add doc about effects for capabilities

* remove unnecessary imports

* improve doc

* test

* test location

* polish

* improve import of colored

* termcolor

* termcolor

* use pull request for openai test
Chi Wang 2024-03-03 15:15:52 -08:00 committed by GitHub
parent f749deeda9
commit cd3b5c6bbd
16 changed files with 50 additions and 119 deletions

View File

@@ -4,7 +4,7 @@
name: OpenAI4ContribTests
on:
-pull_request_target:
+pull_request:
branches: ['main']
paths:
- 'autogen/**'
@@ -173,7 +173,7 @@ jobs:
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
OAI_CONFIG_LIST: ${{ secrets.OAI_CONFIG_LIST }}
run: |
-coverage run -a -m pytest test/agentchat/contrib/test_teachable_agent.py
+coverage run -a -m pytest test/agentchat/contrib/capabilities/test_teachable_agent.py
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3

View File

@@ -172,7 +172,7 @@ jobs:
- name: Coverage
run: |
pip install coverage>=5.3
-coverage run -a -m pytest test/agentchat/contrib/test_teachable_agent.py --skip-openai
+coverage run -a -m pytest test/agentchat/contrib/capabilities/test_teachable_agent.py --skip-openai
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3

View File

@@ -37,7 +37,7 @@ jobs:
- name: pydoc-markdown install
run: |
python -m pip install --upgrade pip
-pip install pydoc-markdown pyyaml colored
+pip install pydoc-markdown pyyaml termcolor
- name: pydoc-markdown run
run: |
pydoc-markdown
@@ -83,7 +83,7 @@ jobs:
- name: pydoc-markdown install
run: |
python -m pip install --upgrade pip
-pip install pydoc-markdown pyyaml colored
+pip install pydoc-markdown pyyaml termcolor
- name: pydoc-markdown run
run: |
pydoc-markdown

View File

@@ -4,7 +4,7 @@
name: OpenAI
on:
-pull_request_target:
+pull_request:
branches: ["main"]
paths:
- "autogen/**"

View File

@@ -3,15 +3,9 @@ import logging
from collections import defaultdict
from typing import Dict, List, Any, Set, Tuple
from dataclasses import dataclass
from .utils import consolidate_chat_info
import warnings
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
from .utils import consolidate_chat_info
logger = logging.getLogger(__name__)
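
For reference, the change repeated across these modules replaces a per-file fallback import of termcolor's colored with the single fallback already defined in autogen.agentchat.conversable_agent. A minimal sketch of the two patterns (illustrative only; assumes autogen is installed and termcolor may or may not be):

# Old pattern, duplicated in each module: degrade to a no-op if termcolor is absent.
try:
    from termcolor import colored
except ImportError:

    def colored(x, *args, **kwargs):
        return x


# New pattern: reuse the single fallback defined centrally in conversable_agent.
from autogen.agentchat.conversable_agent import colored

print(colored("hello", "green"))  # colored output if termcolor is installed, plain text otherwise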

View File

@@ -25,10 +25,9 @@ class TransformChatHistory:
2. Second, it limits the number of messages to keep
3. Third, it limits the total number of tokens in the chat history
Args:
max_tokens_per_message (Optional[int]): Maximum number of tokens to keep in each message.
max_messages (Optional[int]): Maximum number of messages to keep in the context.
max_tokens (Optional[int]): Maximum number of tokens to keep in the context.
When adding this capability to an agent, the following are modified:
- A hook is added to the hookable method `process_all_messages_before_reply` to transform the received messages for possible truncation.
The stored message history itself is not modified.
"""
def __init__(
@@ -38,6 +37,12 @@ class TransformChatHistory:
max_messages: Optional[int] = None,
max_tokens: Optional[int] = None,
):
"""
Args:
max_tokens_per_message (Optional[int]): Maximum number of tokens to keep in each message.
max_messages (Optional[int]): Maximum number of messages to keep in the context.
max_tokens (Optional[int]): Maximum number of tokens to keep in the context.
"""
self.max_tokens_per_message = max_tokens_per_message if max_tokens_per_message else sys.maxsize
self.max_messages = max_messages if max_messages else sys.maxsize
self.max_tokens = max_tokens if max_tokens else sys.maxsize
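
A hedged sketch of attaching this capability, matching the behavior documented above; the context_handling module name and the limit values are assumptions, and add_to_agent is taken to follow the same capability interface used by Teachability elsewhere in this commit:

from autogen import ConversableAgent
# The module name below is an assumption; the class sits in the capabilities package shown in this commit.
from autogen.agentchat.contrib.capabilities.context_handling import TransformChatHistory

agent = ConversableAgent(name="assistant", llm_config=False)

# Example limits: keep at most 10 messages and roughly 1000 tokens of context per reply.
context_handling = TransformChatHistory(max_messages=10, max_tokens=1000)

# Registers a hook on `process_all_messages_before_reply`: only the messages handed to the
# model are truncated; the agent's stored message history is left unchanged.
context_handling.add_to_agent(agent)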

View File

@@ -1,18 +1,12 @@
import os
from autogen.agentchat.assistant_agent import ConversableAgent
from autogen.agentchat.contrib.capabilities.agent_capability import AgentCapability
from autogen.agentchat.contrib.text_analyzer_agent import TextAnalyzerAgent
from typing import Dict, Optional, Union, List, Tuple, Any
from typing import Dict, Optional, Union
import chromadb
from chromadb.config import Settings
import pickle
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
from autogen.agentchat.assistant_agent import ConversableAgent
from autogen.agentchat.contrib.capabilities.agent_capability import AgentCapability
from autogen.agentchat.contrib.text_analyzer_agent import TextAnalyzerAgent
from autogen.agentchat.conversable_agent import colored
class Teachability(AgentCapability):
@@ -23,6 +17,13 @@ class Teachability(AgentCapability):
To make any conversable agent teachable, instantiate both the agent and the Teachability class,
then pass the agent to teachability.add_to_agent(agent).
Note that teachable agents in a group chat must be given unique path_to_db_dir values.
When adding Teachability to an agent, the following are modified:
- The agent's system message is appended with a note about the agent's new ability.
- A hook is added to the agent's `process_last_received_message` hookable method,
and the hook potentially modifies the last of the received messages to include earlier teachings related to the message.
Added teachings do not propagate into the stored message history.
If new user teachings are detected, they are added to new memos in the vector database.
"""
def __init__(
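
The usage the docstring describes looks roughly like this (LLM configuration elided; path_to_db_dir and add_to_agent are the names given in the docstring, and the directory value is only an example):

from autogen import ConversableAgent
from autogen.agentchat.contrib.capabilities.teachability import Teachability

# An llm_config would normally be built from config_list_from_json; omitted here for brevity.
teachable_agent = ConversableAgent(name="teachable_agent", llm_config=False)

# Each teachable agent in a group chat needs its own path_to_db_dir.
teachability = Teachability(path_to_db_dir="./tmp/teachability_db")

# Appends a note to the agent's system message and hooks `process_last_received_message`
# so relevant stored memos are added to the incoming message, not to the stored history.
teachability.add_to_agent(teachable_agent)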

View File

@@ -1,25 +1,14 @@
import json
import logging
import os
import pdb
import re
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import List, Optional, Tuple
import replicate
import requests
from regex import R
from autogen.agentchat.agent import Agent
from autogen.agentchat.contrib.img_utils import get_image_data, llava_formatter
from autogen.agentchat.contrib.multimodal_conversable_agent import MultimodalConversableAgent
from autogen.code_utils import content_str
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
from autogen.agentchat.conversable_agent import colored
logger = logging.getLogger(__name__)

View File

@@ -1,26 +1,16 @@
import copy
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from typing import Dict, List, Optional, Tuple, Union
from autogen import OpenAIWrapper
from autogen.agentchat import Agent, ConversableAgent
from autogen.agentchat.contrib.img_utils import (
convert_base64_to_data_uri,
gpt4v_formatter,
message_formatter_pil_to_b64,
pil_to_data_uri,
)
from autogen.code_utils import content_str
from ..._pydantic import model_dump
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
from autogen.code_utils import content_str
DEFAULT_LMM_SYS_MSG = """You are a helpful AI assistant."""
DEFAULT_MODEL = "gpt-4-vision-preview"

View File

@@ -1,4 +1,6 @@
import re
from typing import Callable, Dict, Optional, Union, List, Tuple, Any
from IPython import get_ipython
try:
import chromadb
@@ -10,16 +12,7 @@ from autogen.retrieve_utils import create_vector_db_from_dir, query_vector_db, T
from autogen.token_count_utils import count_token
from autogen.code_utils import extract_code
from autogen import logger
from typing import Callable, Dict, Optional, Union, List, Tuple, Any
from IPython import get_ipython
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
from autogen.agentchat.conversable_agent import colored
PROMPT_DEFAULT = """You're a retrieve augmented chatbot. You answer user's questions based on your own knowledge and the

View File

@@ -3,21 +3,14 @@
import uuid
import warnings
from typing import Any, ClassVar, List, Optional
from pydantic import BaseModel, Field, field_validator
from autogen.agentchat.conversable_agent import colored
from ..agentchat.agent import LLMAgent
from ..code_utils import execute_code
from .base import CodeBlock, CodeExtractor, CodeResult
from .markdown_code_extractor import MarkdownCodeExtractor
try:
from termcolor import colored
except ImportError:
def colored(x: Any, *args: Any, **kwargs: Any) -> str: # type: ignore[misc]
return x # type: ignore[no-any-return]
__all__ = (
"LocalCommandlineCodeExecutor",

View File

@@ -161,14 +161,6 @@
"# Now add the Teachability capability to the agent.\n",
"teachability.add_to_agent(teachable_agent)\n",
"\n",
"try:\n",
" from termcolor import colored\n",
"except ImportError:\n",
"\n",
" def colored(x, *args, **kwargs):\n",
" return x\n",
"\n",
"\n",
"# Instantiate a UserProxyAgent to represent the user. But in this notebook, all user input will be simulated.\n",
"user = UserProxyAgent(\n",
" name=\"user\",\n",

View File

@@ -1,24 +1,16 @@
#!/usr/bin/env python3 -m pytest
import os
import sys
from termcolor import colored
from autogen import UserProxyAgent, config_list_from_json
from autogen.agentchat.contrib.capabilities.teachability import Teachability
from autogen import ConversableAgent
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
from test_assistant_agent import OAI_CONFIG_LIST, KEY_LOC # noqa: E402
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
# Specify the model to use. GPT-3.5 is less reliable than GPT-4 at learning from user input.
filter_dict = {"model": ["gpt-4-0125-preview"]}
# filter_dict = {"model": ["gpt-3.5-turbo-1106"]}

View File

@@ -10,7 +10,7 @@ from autogen import AssistantAgent, UserProxyAgent
# from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(os.path.join(os.path.dirname(__file__), "../../.."))
from conftest import skip_openai # noqa: E402
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))

View File

@@ -3,28 +3,21 @@
import pytest
import os
import sys
from termcolor import colored
from autogen import ConversableAgent, config_list_from_json
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
sys.path.append(os.path.join(os.path.dirname(__file__), "../../.."))
from conftest import skip_openai # noqa: E402
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
from test_assistant_agent import OAI_CONFIG_LIST, KEY_LOC # noqa: E402
try:
from openai import OpenAI
from autogen.agentchat.contrib.capabilities.teachability import Teachability
except ImportError:
skip = True
else:
skip = False or skip_openai
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
skip = skip_openai
# Specify the model to use by uncommenting one of the following lines.
@@ -141,7 +134,7 @@ def use_task_advice_pair_phrasing():
@pytest.mark.skipif(
skip,
-reason="do not run if dependency is not installed",
+reason="do not run if dependency is not installed or requested to skip",
)
def test_teachability_code_paths():
"""Runs this file's unit tests."""
@@ -172,7 +165,7 @@ def test_teachability_code_paths():
@pytest.mark.skipif(
skip,
-reason="do not run if dependency is not installed",
+reason="do not run if dependency is not installed or requested to skip",
)
def test_teachability_accuracy():
"""A very cheap and fast test of teachability accuracy."""

View File

@@ -14,12 +14,10 @@ import time
import typing
import concurrent.futures
import os
from typing import Any, Dict, Optional, Tuple, Union
from typing import Dict, Optional, Tuple, Union
from dataclasses import dataclass
from multiprocessing import current_process
from termcolor import colored
try:
import yaml
@@ -27,7 +25,6 @@ except ImportError:
print("pyyaml not found.\n\nPlease install pyyaml:\n\tpip install pyyaml\n")
sys.exit(1)
try:
import nbclient
from nbclient.client import (
@@ -49,14 +46,6 @@ except ImportError:
print("test won't work without nbclient")
try:
from termcolor import colored
except ImportError:
def colored(x, *args, **kwargs):
return x
class Result:
def __init__(self, returncode: int, stdout: str, stderr: str):
self.returncode = returncode