2023-09-29 04:14:36 +08:00
|
|
|
from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
|
|
|
|
|
|
|
|
# Load LLM inference endpoints from an env variable or a file
|
|
|
|
# See https://microsoft.github.io/autogen/docs/FAQ#set-your-api-endpoints
|
2023-10-01 00:21:07 +08:00
|
|
|
# and OAI_CONFIG_LIST_sample
|
2023-09-29 04:14:36 +08:00
|
|
|
# Pull the endpoint configuration (model names, API keys, base URLs) from the
# OAI_CONFIG_LIST env variable, or from a file of that name if the variable is unset.
config_list = config_list_from_json(env_or_file="OAI_CONFIG_LIST")

# The assistant agent drives the conversation: it plans and writes code.
llm_config = {"config_list": config_list}
assistant = AssistantAgent("assistant", llm_config=llm_config)

# The user-proxy agent stands in for the human: it runs whatever code the
# assistant produces, using ./coding as the working directory.
user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding"})

# Start the two-agent loop with the initial task prompt; the agents alternate
# turns until the task is considered done.
user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")
|