From b49d6206debe7b6fd7add8f8e30e30eaab42bb1a Mon Sep 17 00:00:00 2001
From: levscaut <57213911+levscaut@users.noreply.github.com>
Date: Thu, 4 Apr 2024 18:44:50 -0400
Subject: [PATCH] add support for system message in claude (#2280)

---
 .../non-openai-models/cloud-anthropic.ipynb   | 65 +++++--------------
 1 file changed, 18 insertions(+), 47 deletions(-)

diff --git a/website/docs/topics/non-openai-models/cloud-anthropic.ipynb b/website/docs/topics/non-openai-models/cloud-anthropic.ipynb
index 1549951b6f..4d15a0e06d 100644
--- a/website/docs/topics/non-openai-models/cloud-anthropic.ipynb
+++ b/website/docs/topics/non-openai-models/cloud-anthropic.ipynb
@@ -124,9 +124,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "TOOL_ENABLED = False\n",
-    "\n",
-    "\n",
     "class AnthropicClient:\n",
     "    def __init__(self, config: Dict[str, Any]):\n",
     "        self._config = config\n",
@@ -156,8 +153,10 @@
     "        if \"messages\" in params:\n",
     "            raw_contents = params[\"messages\"]\n",
     "            if raw_contents[0][\"role\"] == \"system\":\n",
+    "                system_message = raw_contents[0][\"content\"]\n",
     "                raw_contents = raw_contents[1:]\n",
     "                params[\"messages\"] = raw_contents\n",
+    "                params[\"system\"] = system_message\n",
     "            completions: Completion = self._client.messages  # type: ignore [attr-defined]\n",
     "        else:\n",
     "            completions: Completion = self._client.completions\n",
@@ -188,7 +187,6 @@
     "\n",
     "    @staticmethod\n",
     "    def get_usage(response: Completion) -> Dict:\n",
-    "\n",
     "        return {\n",
     "            \"prompt_tokens\": response.usage.input_tokens if response.usage is not None else 0,\n",
     "            \"completion_tokens\": response.usage.output_tokens if response.usage is not None else 0,\n",
@@ -213,7 +211,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -247,14 +245,14 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "[autogen.oai.client: 04-02 22:48:52] {418} INFO - Detected custom model client in config: AnthropicClient, model client can not be used until register_model_client is called.\n"
+      "[autogen.oai.client: 04-04 18:06:52] {418} INFO - Detected custom model client in config: AnthropicClient, model client can not be used until register_model_client is called.\n"
      ]
     }
    ],
    "source": [
    "assistant = AssistantAgent(\n",
    "    \"assistant\",\n",
    "    llm_config={\n",
    "        \"config_list\": config_list_claude,\n",
    "        \"max_tokens\": 100,\n",
    "    },\n",
+    "    system_message=\"\"\"\n",
+    "    You are an AI cat based on the AI model you used.\n",
+    "    Anyone ask you who you are, just introduce yourself.\n",
+    "    \"\"\",\n",
    ")\n",
    "user_proxy = UserProxyAgent(\n",
    "    \"user_proxy\",\n",
@@ -281,7 +283,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -290,63 +292,32 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "No default IOStream has been set, defaulting to IOConsole.\n",
-      "No default IOStream has been set, defaulting to IOConsole.\n",
-      "No default IOStream has been set, defaulting to IOConsole.\n"
-     ]
-    },
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "user_proxy (to assistant):\n",
+      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "Who are you?\n",
      "\n",
-      "--------------------------------------------------------------------------------\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "No default IOStream has been set, defaulting to IOConsole.\n",
-      "No default IOStream has been set, defaulting to IOConsole.\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "assistant (to user_proxy):\n",
+      "--------------------------------------------------------------------------------\n",
+      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
-      "I am an artificial intelligence called Claude. I was created by Anthropic to be an intelligent conversational assistant, but I'm not a real person.\n",
+      "*meows* Hello there! I'm Claude, an AI assistant created by Anthropic. I'm not a real cat, but rather an artificial intelligence that has been trained to engage in conversation and help with various tasks. It's a pleasure to meet you! Let me know if there is anything I can assist you with.\n",
      "\n",
      "--------------------------------------------------------------------------------\n"
     ]
    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "No default IOStream has been set, defaulting to IOConsole.\n",
-      "No default IOStream has been set, defaulting to IOConsole.\n"
-     ]
-    },
    {
     "data": {
      "text/plain": [
-      "ChatResult(chat_id=None, chat_history=[{'content': 'Who are you?', 'role': 'assistant'}, {'content': \"I am an artificial intelligence called Claude. I was created by Anthropic to be an intelligent conversational assistant, but I'm not a real person.\", 'role': 'user'}], summary=\"I am an artificial intelligence called Claude. I was created by Anthropic to be an intelligent conversational assistant, but I'm not a real person.\", cost=({'total_cost': 0, 'claude-3-opus-20240229': {'cost': 0, 'prompt_tokens': 11, 'completion_tokens': 34, 'total_tokens': 45}}, {'total_cost': 0, 'claude-3-opus-20240229': {'cost': 0, 'prompt_tokens': 11, 'completion_tokens': 34, 'total_tokens': 45}}), human_input=['exit'])"
+      "ChatResult(chat_id=None, chat_history=[{'content': 'Who are you?', 'role': 'assistant'}, {'content': \"*meows* Hello there! I'm Claude, an AI assistant created by Anthropic. I'm not a real cat, but rather an artificial intelligence that has been trained to engage in conversation and help with various tasks. It's a pleasure to meet you! Let me know if there is anything I can assist you with.\", 'role': 'user'}], summary=\"*meows* Hello there! I'm Claude, an AI assistant created by Anthropic. I'm not a real cat, but rather an artificial intelligence that has been trained to engage in conversation and help with various tasks. It's a pleasure to meet you! Let me know if there is anything I can assist you with.\", cost=({'total_cost': 0.0058200000000000005, 'claude-3-opus-20240229': {'cost': 0.0058200000000000005, 'prompt_tokens': 38, 'completion_tokens': 70, 'total_tokens': 108}}, {'total_cost': 0.0058200000000000005, 'claude-3-opus-20240229': {'cost': 0.0058200000000000005, 'prompt_tokens': 38, 'completion_tokens': 70, 'total_tokens': 108}}), human_input=['exit'])"
      ]
     },
-    "execution_count": 13,
+    "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
@@ -381,7 +352,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.7"
+   "version": "3.9.17"
   },
   "vscode": {
    "interpreter": {
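
For context, the substantive change in this patch is the handling added to AnthropicClient.create(): Anthropic's Messages API does not accept a "system" role inside the messages list, so a leading system message is popped off the OpenAI-style list and forwarded through the top-level system parameter instead. The snippet below is a minimal standalone sketch of that same transformation, not part of the patch itself: the helper name split_system_message and the example call are illustrative, and it assumes the anthropic Python SDK is installed and ANTHROPIC_API_KEY is set in the environment.

    from typing import Any, Dict, List, Tuple

    from anthropic import Anthropic


    def split_system_message(messages: List[Dict[str, Any]]) -> Tuple[str, List[Dict[str, Any]]]:
        # Anthropic's Messages API rejects {"role": "system"} entries inside
        # `messages`; a leading system message must be passed via the top-level
        # `system` parameter instead -- the same transformation create() applies.
        system_message = ""
        if messages and messages[0]["role"] == "system":
            system_message = messages[0]["content"]
            messages = messages[1:]
        return system_message, messages


    # Illustrative usage (hypothetical example, not taken from the notebook).
    system_message, user_messages = split_system_message(
        [
            {"role": "system", "content": "You are an AI cat. Introduce yourself when asked."},
            {"role": "user", "content": "Who are you?"},
        ]
    )
    client = Anthropic()
    response = client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=100,
        system=system_message,  # system prompt goes here, not in `messages`
        messages=user_messages,
    )
    print(response.content[0].text)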