From 364f0d580c4a47454e2475e226ca19f72d27129a Mon Sep 17 00:00:00 2001 From: Davor Runje Date: Fri, 17 Jan 2025 12:13:11 +0100 Subject: [PATCH 1/3] wip --- autogen/agentchat/conversable_agent.py | 12 +- autogen/agentchat/initiate_chat.py | 34 + notebook/deepseek.ipynb | 1056 ++++++++++++++++++++++++ pyproject.toml | 1 + 4 files changed, 1102 insertions(+), 1 deletion(-) create mode 100644 autogen/agentchat/initiate_chat.py create mode 100644 notebook/deepseek.ipynb diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py index 1812c56600..d7c60da7c8 100644 --- a/autogen/agentchat/conversable_agent.py +++ b/autogen/agentchat/conversable_agent.py @@ -14,6 +14,7 @@ import warnings from collections import defaultdict from typing import ( + TYPE_CHECKING, Any, Callable, Literal, @@ -66,8 +67,13 @@ F = TypeVar("F", bound=Callable[..., Any]) +if TYPE_CHECKING: + # checks if ConversableAgent is implementing LLMAgent protocol + def create_conversible_agent(name: str) -> LLMAgent: + return ConversableAgent(name) -class ConversableAgent(LLMAgent): + +class ConversableAgent: """(In preview) A class for generic conversable agents which can be configured as assistant or user proxy. After receiving each message, the agent will send a reply to the sender unless the msg is a termination msg. 
@@ -284,6 +290,10 @@ def __init__( "update_agent_state": [], } + # check if the agent is implementing LLMAgent protocol + if not isinstance(self, LLMAgent): + raise TypeError("ConversableAgent must implement LLMAgent protocol") + def _validate_name(self, name: str) -> None: # Validation for name using regex to detect any whitespace if re.search(r"\s", name): diff --git a/autogen/agentchat/initiate_chat.py b/autogen/agentchat/initiate_chat.py new file mode 100644 index 0000000000..f699fe48e9 --- /dev/null +++ b/autogen/agentchat/initiate_chat.py @@ -0,0 +1,34 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import TYPE_CHECKING, Any, Iterable, Optional, Protocol, Union + +from .agent import Agent + +if TYPE_CHECKING: + from ..messages import BaseMessage + + +class ResponseProtocol(Protocol): + @property + def messages(self) -> Iterable["BaseMessage"]: + """The messages received by the agent.""" + ... + +class AsyncResponseProtocol(Protocol): + @property + def messages(self) -> Iterable["BaseMessage"]: + """The messages received by the agent.""" + ... 
+ + +def initiate_chat( + agent: Agent, *, message: Union[str, dict[str, Any]], recipient: Optional[Agent], request_reply: bool = False +) -> ResponseProtocol: + pass + +def a_initiate_chat( + agent: Agent, *, message: Union[str, dict[str, Any]], recipient: Optional[Agent], request_reply: bool = False +) -> ResponseProtocol: + pass \ No newline at end of file diff --git a/notebook/deepseek.ipynb b/notebook/deepseek.ipynb new file mode 100644 index 0000000000..74988c61db --- /dev/null +++ b/notebook/deepseek.ipynb @@ -0,0 +1,1056 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: nest_asyncio in /work/davor/projects/airt/ag2/.venv-3.9/lib/python3.9/site-packages (1.6.0)\n" + ] + } + ], + "source": [ + "!pip install nest_asyncio" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "Tell me a joke about NVDA and TESLA stock prices.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[32m\u001b[0m\n", + "\n", + "\u001b[32mSure\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m Here\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m light\u001b[0m\n", + "\n", + "\u001b[32m-hearted\u001b[0m\n", + "\n", + "\u001b[32m joke\u001b[0m\n", + "\n", + "\u001b[32m for\u001b[0m\n", + "\n", + "\u001b[32m you\u001b[0m\n", + "\n", + "\u001b[32m:\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mWhy\u001b[0m\n", + "\n", + "\u001b[32m did\u001b[0m\n", + "\n", + "\u001b[32m 
NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m investors\u001b[0m\n", + "\n", + "\u001b[32m break\u001b[0m\n", + "\n", + "\u001b[32m up\u001b[0m\n", + "\n", + "\u001b[32m?\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mBecause\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m kept\u001b[0m\n", + "\n", + "\u001b[32m rendering\u001b[0m\n", + "\n", + "\u001b[32m their\u001b[0m\n", + "\n", + "\u001b[32m future\u001b[0m\n", + "\n", + "\u001b[32m together\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m but\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m was\u001b[0m\n", + "\n", + "\u001b[32m always\u001b[0m\n", + "\n", + "\u001b[32m driving\u001b[0m\n", + "\n", + "\u001b[32m them\u001b[0m\n", + "\n", + "\u001b[32m in\u001b[0m\n", + "\n", + "\u001b[32m circles\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸš—\u001b[0m\n", + "\n", + "\u001b[32mπŸ’¨\u001b[0m\n", + "\n", + "\u001b[32mπŸ’»\u001b[0m\n", + "\n", + "\u001b[32m\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mHope\u001b[0m\n", + "\n", + "\u001b[32m that\u001b[0m\n", + "\n", + "\u001b[32m gave\u001b[0m\n", + "\n", + "\u001b[32m you\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m chuckle\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸ˜„\u001b[0m\n", + "\n", + "\u001b[32m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Model deepseek-chat not found. Using cl100k_base encoding.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\n", + "\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "Sure! 
Here's a light-hearted joke for you:\n", + "\n", + "Why did NVDA and TESLA investors break up?\n", + "\n", + "Because NVDA kept rendering their future together, but TESLA was always driving them in circles! πŸš—πŸ’¨πŸ’»\n", + "\n", + "Hope that gave you a chuckle! πŸ˜„\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[32m\u001b[0m\n", + "\n", + "\u001b[32mGl\u001b[0m\n", + "\n", + "\u001b[32mad\u001b[0m\n", + "\n", + "\u001b[32m you\u001b[0m\n", + "\n", + "\u001b[32m enjoyed\u001b[0m\n", + "\n", + "\u001b[32m it\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m Here\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m another\u001b[0m\n", + "\n", + "\u001b[32m one\u001b[0m\n", + "\n", + "\u001b[32m:\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mWhy\u001b[0m\n", + "\n", + "\u001b[32m did\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m stock\u001b[0m\n", + "\n", + "\u001b[32m prices\u001b[0m\n", + "\n", + "\u001b[32m go\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m couples\u001b[0m\n", + "\n", + "\u001b[32m therapy\u001b[0m\n", + "\n", + "\u001b[32m?\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mBecause\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m was\u001b[0m\n", + "\n", + "\u001b[32m always\u001b[0m\n", + "\n", + "\u001b[32m trying\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m process\u001b[0m\n", + "\n", + "\u001b[32m things\u001b[0m\n", + "\n", + "\u001b[32m 
too\u001b[0m\n", + "\n", + "\u001b[32m quickly\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m while\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m kept\u001b[0m\n", + "\n", + "\u001b[32m taking\u001b[0m\n", + "\n", + "\u001b[32m them\u001b[0m\n", + "\n", + "\u001b[32m on\u001b[0m\n", + "\n", + "\u001b[32m wild\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m unpredictable\u001b[0m\n", + "\n", + "\u001b[32m rides\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸš€\u001b[0m\n", + "\n", + "\u001b[32mπŸ’¨\u001b[0m\n", + "\n", + "\u001b[32m\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mInvest\u001b[0m\n", + "\n", + "\u001b[32ming\u001b[0m\n", + "\n", + "\u001b[32m in\u001b[0m\n", + "\n", + "\u001b[32m them\u001b[0m\n", + "\n", + "\u001b[32m is\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m roller\u001b[0m\n", + "\n", + "\u001b[32mco\u001b[0m\n", + "\n", + "\u001b[32master\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m but\u001b[0m\n", + "\n", + "\u001b[32m at\u001b[0m\n", + "\n", + "\u001b[32m least\u001b[0m\n", + "\n", + "\u001b[32m it\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m never\u001b[0m\n", + "\n", + "\u001b[32m boring\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸ˜„\u001b[0m\n", + "\n", + "\u001b[32m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Model deepseek-chat not found. Using cl100k_base encoding.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\n", + "\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "Glad you enjoyed it! 
Here's another one:\n", + "\n", + "Why did the NVDA and TESLA stock prices go to couples therapy?\n", + "\n", + "Because NVDA was always trying to process things too quickly, while TESLA kept taking them on wild, unpredictable rides! πŸš€πŸ’¨\n", + "\n", + "Investing in them is a rollercoaster, but at least it's never boring! πŸ˜„\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[32m\u001b[0m\n", + "\n", + "\u001b[32mAbsolutely\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m Here\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m one\u001b[0m\n", + "\n", + "\u001b[32m more\u001b[0m\n", + "\n", + "\u001b[32m for\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m road\u001b[0m\n", + "\n", + "\u001b[32m:\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mWhy\u001b[0m\n", + "\n", + "\u001b[32m did\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m stocks\u001b[0m\n", + "\n", + "\u001b[32m go\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m same\u001b[0m\n", + "\n", + "\u001b[32m party\u001b[0m\n", + "\n", + "\u001b[32m?\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mBecause\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m brought\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32mgraph\u001b[0m\n", + "\n", + "\u001b[32mics\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m 
TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m brought\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32menergy\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32mβ€”\u001b[0m\n", + "\n", + "\u001b[32mbut\u001b[0m\n", + "\n", + "\u001b[32m by\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m end\u001b[0m\n", + "\n", + "\u001b[32m of\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m night\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m they\u001b[0m\n", + "\n", + "\u001b[32m both\u001b[0m\n", + "\n", + "\u001b[32m crashed\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸ’₯\u001b[0m\n", + "\n", + "\u001b[32mπŸ“‰\u001b[0m\n", + "\n", + "\u001b[32mπŸ˜…\u001b[0m\n", + "\n", + "\u001b[32m\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mInvest\u001b[0m\n", + "\n", + "\u001b[32ming\u001b[0m\n", + "\n", + "\u001b[32m in\u001b[0m\n", + "\n", + "\u001b[32m tech\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m EVs\u001b[0m\n", + "\n", + "\u001b[32mβ€”\u001b[0m\n", + "\n", + "\u001b[32malways\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m wild\u001b[0m\n", + "\n", + "\u001b[32m ride\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m 🎒\u001b[0m\n", + "\n", + "\u001b[32m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Model deepseek-chat not found. Using cl100k_base encoding.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\n", + "\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "Absolutely! Here's one more for the road:\n", + "\n", + "Why did NVDA and TESLA stocks go to the same party?\n", + "\n", + "Because NVDA brought the *graphics* and TESLA brought the *energy*β€”but by the end of the night, they both crashed! 
πŸ’₯πŸ“‰πŸ˜…\n", + "\n", + "Investing in tech and EVsβ€”always a wild ride! 🎒\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[32m\u001b[0m\n", + "\n", + "\u001b[32mH\u001b[0m\n", + "\n", + "\u001b[32maha\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m love\u001b[0m\n", + "\n", + "\u001b[32m it\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m Here\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m bonus\u001b[0m\n", + "\n", + "\u001b[32m one\u001b[0m\n", + "\n", + "\u001b[32m for\u001b[0m\n", + "\n", + "\u001b[32m you\u001b[0m\n", + "\n", + "\u001b[32m:\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mWhy\u001b[0m\n", + "\n", + "\u001b[32m did\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m investors\u001b[0m\n", + "\n", + "\u001b[32m start\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m band\u001b[0m\n", + "\n", + "\u001b[32m?\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mBecause\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m was\u001b[0m\n", + "\n", + "\u001b[32m great\u001b[0m\n", + "\n", + "\u001b[32m at\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32mprocessing\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m beats\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + 
"\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m knew\u001b[0m\n", + "\n", + "\u001b[32m how\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32mamp\u001b[0m\n", + "\n", + "\u001b[32m up\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m energy\u001b[0m\n", + "\n", + "\u001b[32mβ€”\u001b[0m\n", + "\n", + "\u001b[32mbut\u001b[0m\n", + "\n", + "\u001b[32m their\u001b[0m\n", + "\n", + "\u001b[32m portfolio\u001b[0m\n", + "\n", + "\u001b[32m still\u001b[0m\n", + "\n", + "\u001b[32m ended\u001b[0m\n", + "\n", + "\u001b[32m up\u001b[0m\n", + "\n", + "\u001b[32m sounding\u001b[0m\n", + "\n", + "\u001b[32m like\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m broken\u001b[0m\n", + "\n", + "\u001b[32m record\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m 🎸\u001b[0m\n", + "\n", + "\u001b[32mπŸ”‹\u001b[0m\n", + "\n", + "\u001b[32mπŸ“Š\u001b[0m\n", + "\n", + "\u001b[32m\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mInvest\u001b[0m\n", + "\n", + "\u001b[32ming\u001b[0m\n", + "\n", + "\u001b[32m in\u001b[0m\n", + "\n", + "\u001b[32m innovation\u001b[0m\n", + "\n", + "\u001b[32m is\u001b[0m\n", + "\n", + "\u001b[32m never\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m dull\u001b[0m\n", + "\n", + "\u001b[32m moment\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸ˜„\u001b[0m\n", + "\n", + "\u001b[32m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Model deepseek-chat not found. Using cl100k_base encoding.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\n", + "\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "Haha, love it! 
Here's a bonus one for you:\n", + "\n", + "Why did the NVDA and TESLA investors start a band?\n", + "\n", + "Because NVDA was great at *processing* the beats, and TESLA knew how to *amp up* the energyβ€”but their portfolio still ended up sounding like a broken record! πŸŽΈπŸ”‹πŸ“Š\n", + "\n", + "Investing in innovation is never a dull moment! πŸ˜„\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[32m\u001b[0m\n", + "\n", + "\u001b[32mYou\u001b[0m\n", + "\n", + "\u001b[32m're\u001b[0m\n", + "\n", + "\u001b[32m on\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m roll\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m Here\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m one\u001b[0m\n", + "\n", + "\u001b[32m last\u001b[0m\n", + "\n", + "\u001b[32m one\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m keep\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m laughs\u001b[0m\n", + "\n", + "\u001b[32m going\u001b[0m\n", + "\n", + "\u001b[32m:\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mWhy\u001b[0m\n", + "\n", + "\u001b[32m did\u001b[0m\n", + "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m stocks\u001b[0m\n", + "\n", + "\u001b[32m get\u001b[0m\n", + "\n", + "\u001b[32m invited\u001b[0m\n", + "\n", + "\u001b[32m to\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m same\u001b[0m\n", + "\n", + "\u001b[32m comedy\u001b[0m\n", + "\n", + "\u001b[32m show\u001b[0m\n", + "\n", + "\u001b[32m?\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mBecause\u001b[0m\n", 
+ "\n", + "\u001b[32m NV\u001b[0m\n", + "\n", + "\u001b[32mDA\u001b[0m\n", + "\n", + "\u001b[32m had\u001b[0m\n", + "\n", + "\u001b[32m everyone\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32mdy\u001b[0m\n", + "\n", + "\u001b[32ming\u001b[0m\n", + "\n", + "\u001b[32m of\u001b[0m\n", + "\n", + "\u001b[32m laughter\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32m with\u001b[0m\n", + "\n", + "\u001b[32m its\u001b[0m\n", + "\n", + "\u001b[32m performance\u001b[0m\n", + "\n", + "\u001b[32m,\u001b[0m\n", + "\n", + "\u001b[32m and\u001b[0m\n", + "\n", + "\u001b[32m TES\u001b[0m\n", + "\n", + "\u001b[32mLA\u001b[0m\n", + "\n", + "\u001b[32m left\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m audience\u001b[0m\n", + "\n", + "\u001b[32m *\u001b[0m\n", + "\n", + "\u001b[32melect\u001b[0m\n", + "\n", + "\u001b[32mr\u001b[0m\n", + "\n", + "\u001b[32mified\u001b[0m\n", + "\n", + "\u001b[32m*\u001b[0m\n", + "\n", + "\u001b[32mβ€”\u001b[0m\n", + "\n", + "\u001b[32mbut\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m punch\u001b[0m\n", + "\n", + "\u001b[32mline\u001b[0m\n", + "\n", + "\u001b[32m was\u001b[0m\n", + "\n", + "\u001b[32m always\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m same\u001b[0m\n", + "\n", + "\u001b[32m:\u001b[0m\n", + "\n", + "\u001b[32m \"\u001b[0m\n", + "\n", + "\u001b[32mShould\u001b[0m\n", + "\n", + "\u001b[32m've\u001b[0m\n", + "\n", + "\u001b[32m bought\u001b[0m\n", + "\n", + "\u001b[32m the\u001b[0m\n", + "\n", + "\u001b[32m dip\u001b[0m\n", + "\n", + "\u001b[32m!\"\u001b[0m\n", + "\n", + "\u001b[32m 🎀\u001b[0m\n", + "\n", + "\u001b[32m⚑\u001b[0m\n", + "\n", + "\u001b[32mπŸ“ˆ\u001b[0m\n", + "\n", + "\u001b[32m\n", + "\n", + "\u001b[0m\n", + "\n", + "\u001b[32mInvest\u001b[0m\n", + "\n", + "\u001b[32ming\u001b[0m\n", + "\n", + "\u001b[32m humor\u001b[0m\n", + "\n", + "\u001b[32mβ€”\u001b[0m\n", + "\n", + 
"\u001b[32mit\u001b[0m\n", + "\n", + "\u001b[32m's\u001b[0m\n", + "\n", + "\u001b[32m a\u001b[0m\n", + "\n", + "\u001b[32m volatile\u001b[0m\n", + "\n", + "\u001b[32m art\u001b[0m\n", + "\n", + "\u001b[32m!\u001b[0m\n", + "\n", + "\u001b[32m πŸ˜‚\u001b[0m\n", + "\n", + "\u001b[32m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Model deepseek-chat not found. Using cl100k_base encoding.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\n", + "\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "You're on a roll! Here's one last one to keep the laughs going:\n", + "\n", + "Why did NVDA and TESLA stocks get invited to the same comedy show?\n", + "\n", + "Because NVDA had everyone *dying of laughter* with its performance, and TESLA left the audience *electrified*β€”but the punchline was always the same: \"Should've bought the dip!\" πŸŽ€βš‘πŸ“ˆ\n", + "\n", + "Investing humorβ€”it's a volatile art! πŸ˜‚\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': 'Tell me a joke about NVDA and TESLA stock prices.', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Sure! Here's a light-hearted joke for you:\\n\\nWhy did NVDA and TESLA investors break up?\\n\\nBecause NVDA kept rendering their future together, but TESLA was always driving them in circles! πŸš—πŸ’¨πŸ’»\\n\\nHope that gave you a chuckle! πŸ˜„\", 'role': 'user', 'name': 'assistant'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Glad you enjoyed it! Here's another one:\\n\\nWhy did the NVDA and TESLA stock prices go to couples therapy?\\n\\nBecause NVDA was always trying to process things too quickly, while TESLA kept taking them on wild, unpredictable rides! πŸš€πŸ’¨\\n\\nInvesting in them is a rollercoaster, but at least it's never boring! 
πŸ˜„\", 'role': 'user', 'name': 'assistant'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Absolutely! Here's one more for the road:\\n\\nWhy did NVDA and TESLA stocks go to the same party?\\n\\nBecause NVDA brought the *graphics* and TESLA brought the *energy*β€”but by the end of the night, they both crashed! πŸ’₯πŸ“‰πŸ˜…\\n\\nInvesting in tech and EVsβ€”always a wild ride! 🎒\", 'role': 'user', 'name': 'assistant'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Haha, love it! Here's a bonus one for you:\\n\\nWhy did the NVDA and TESLA investors start a band?\\n\\nBecause NVDA was great at *processing* the beats, and TESLA knew how to *amp up* the energyβ€”but their portfolio still ended up sounding like a broken record! πŸŽΈπŸ”‹πŸ“Š\\n\\nInvesting in innovation is never a dull moment! πŸ˜„\", 'role': 'user', 'name': 'assistant'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': 'You\\'re on a roll! Here\\'s one last one to keep the laughs going:\\n\\nWhy did NVDA and TESLA stocks get invited to the same comedy show?\\n\\nBecause NVDA had everyone *dying of laughter* with its performance, and TESLA left the audience *electrified*β€”but the punchline was always the same: \"Should\\'ve bought the dip!\" πŸŽ€βš‘πŸ“ˆ\\n\\nInvesting humorβ€”it\\'s a volatile art! πŸ˜‚', 'role': 'user', 'name': 'assistant'}], summary='You\\'re on a roll! Here\\'s one last one to keep the laughs going:\\n\\nWhy did NVDA and TESLA stocks get invited to the same comedy show?\\n\\nBecause NVDA had everyone *dying of laughter* with its performance, and TESLA left the audience *electrified*β€”but the punchline was always the same: \"Should\\'ve bought the dip!\" πŸŽ€βš‘πŸ“ˆ\\n\\nInvesting humorβ€”it\\'s a volatile art! 
πŸ˜‚', cost={'usage_including_cached_inference': {'total_cost': 0.0002492, 'deepseek-chat': {'cost': 0.0002492, 'prompt_tokens': 1028, 'completion_tokens': 376, 'total_tokens': 1404}}, 'usage_excluding_cached_inference': {'total_cost': 0.0002492, 'deepseek-chat': {'cost': 0.0002492, 'prompt_tokens': 1028, 'completion_tokens': 376, 'total_tokens': 1404}}}, human_input=[])" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from tempfile import TemporaryDirectory\n", + "\n", + "from autogen import Cache, ConversableAgent, UserProxyAgent\n", + "\n", + "# llm_config = {\"model\": \"gpt-4o-mini\", \"api_key\": os.environ[\"OPENAI_API_KEY\"]}\n", + "\n", + "llm_config = {\n", + " \"model\": \"deepseek-chat\",\n", + " \"api_type\": \"deepseek\",\n", + " \"api_key\": \"sk-1ae624abdf6244f5aa189611001e4a6c\",\n", + " \"base_url\": \"https://api.deepseek.com\",\n", + " \"price\": [0.00014, 0.00028],\n", + " \"stream\": True,\n", + "}\n", + "\n", + "assistant = ConversableAgent(\"assistant\", llm_config=llm_config, human_input_mode=\"NEVER\")\n", + "user_proxy = UserProxyAgent(\"user_proxy\", human_input_mode=\"NEVER\")\n", + "\n", + "# message=[{\"content\": \"Tell me a joke about NVDA and TESLA stock prices.\", \"role\": \"user\"}]\n", + "message = \"Tell me a joke about NVDA and TESLA stock prices.\"\n", + "\n", + "with TemporaryDirectory() as temp_dir:\n", + " with Cache.disk(cache_path_root=temp_dir) as cache:\n", + " response = user_proxy.initiate_chat(assistant, message=message, max_turns=5, cache=cache)\n", + "\n", + "# response = user_proxy.initiate_chat(assistant, message=message, max_turns=5)\n", + "# response = await user_proxy.a_initiate_chat(assistant, message=message, max_turns=5)\n", + "\n", + "# response = assistant.generate_oai_reply(messages=message)\n", + "# response = await assistant.a_generate_oai_reply(messages=message)\n", + "# response = assistant.generate_reply(messages=message)\n", + "# 
response = await assistant.a_generate_reply(messages=message)\n", + "# print(response)\n", + "\n", + "# for chunk in response:\n", + "# print('_new_chunk_')\n", + "# print(chunk)\n", + "\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv-3.9", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.21" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pyproject.toml b/pyproject.toml index 5cba779e76..7ad9617a00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -342,6 +342,7 @@ files = [ "autogen/interop", "autogen/agentchat/realtime_agent", "autogen/messages", + "autogen/agentchat/initiate_chat.py", "test/test_pydantic.py", "test/io", "test/tools", From 0fa1e5a3a5d2d8241c830271c89a3c4b186c7064 Mon Sep 17 00:00:00 2001 From: Davor Runje Date: Fri, 17 Jan 2025 16:10:05 +0100 Subject: [PATCH 2/3] wip --- autogen/agentchat/experimental/__init__.py | 3 + .../agentchat/experimental/chat_context.py | 107 ++++++++++++++ .../agentchat/experimental/initiate_chat.py | 134 ++++++++++++++++++ autogen/agentchat/initiate_chat.py | 34 ----- autogen/io/console.py | 5 +- pyproject.toml | 2 +- test/agentchat/experimental/__init__.py | 0 .../experimental/test_initiate_chat.py | 6 + 8 files changed, 253 insertions(+), 38 deletions(-) create mode 100644 autogen/agentchat/experimental/__init__.py create mode 100644 autogen/agentchat/experimental/chat_context.py create mode 100644 autogen/agentchat/experimental/initiate_chat.py delete mode 100644 autogen/agentchat/initiate_chat.py create mode 100644 test/agentchat/experimental/__init__.py create mode 100644 
test/agentchat/experimental/test_initiate_chat.py diff --git a/autogen/agentchat/experimental/__init__.py b/autogen/agentchat/experimental/__init__.py new file mode 100644 index 0000000000..52a620b0e7 --- /dev/null +++ b/autogen/agentchat/experimental/__init__.py @@ -0,0 +1,3 @@ +from .initiate_chat import AsyncResponseProtocol, ResponseProtocol, a_initiate_chat, initiate_chat + +__all__ = ["AsyncResponseProtocol", "ResponseProtocol", "a_initiate_chat", "initiate_chat"] diff --git a/autogen/agentchat/experimental/chat_context.py b/autogen/agentchat/experimental/chat_context.py new file mode 100644 index 0000000000..4524f94d02 --- /dev/null +++ b/autogen/agentchat/experimental/chat_context.py @@ -0,0 +1,107 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from contextvars import ContextVar +from dataclasses import dataclass +from logging import Logger, getLogger +from typing import TYPE_CHECKING, Any, Optional, Protocol, Union, runtime_checkable +from uuid import UUID + +from pydantic import UUID4 + +if TYPE_CHECKING: + from ..agent import Agent + + +@runtime_checkable +class ChatContextProtocol(Protocol): + @property + def initial_agent(self) -> "Agent": + """The agent that initiated the chat.""" + ... + + @property + def agents(self) -> list["Agent"]: + """The agents participating in the chat.""" + ... + + @property + def initial_message(self) -> Union[str, dict[str, Any]]: + """The messages received by the agent.""" + ... + + @property + def messages(self) -> list[dict[str, Any]]: ... + + @property + def logger(self) -> Logger: ... + + @classmethod + def get_registered_chat(cls) -> "ChatContextProtocol": ... + + def __entry__(self) -> "ChatContextProtocol": ... + + def __exit__( + self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[Any] + ) -> None: ... 
+ + +@dataclass +class ChatContext: + initial_agent: "Agent" + agents: list["Agent"] + initial_message: Union[str, dict[str, Any]] + messages: list[dict[str, Any]] + logger: Logger + uuid: UUID + + _registered_chats: ContextVar[list["ChatContext"]] = ContextVar("registered_chats", default=[]) + + def __init__( + self, + *, + initial_agent: "Agent", + agents: Optional[list["Agent"]] = None, + initial_message: Union[str, dict[str, Any]], + logger: Optional[Logger] = None, + uuid: Optional[UUID] = None, + ): + self.initial_agent = initial_agent + self.agents = agents or [] + self.initial_message = initial_message + self.messages = [] + self.logger = logger or getLogger(__name__) + self.uuid = uuid or UUID4() + + @classmethod + def get_registered_chat(cls) -> "ChatContext": + registered_chats: list[ChatContext] = cls._registered_chats.get() + if registered_chats: + return registered_chats[-1] + raise ValueError("No registered chats found.") + + def __entry__(self) -> "ChatContext": + registered_chats = ChatContext._registered_chats.get() + registered_chats.append(self) + return self + + def __exit__(self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[Any]) -> None: + registered_chats = ChatContext._registered_chats.get() + registered_chats.pop() + + # check if the InitiateChatIOStream implements the IOStream protocol + if TYPE_CHECKING: + + @staticmethod + def _type_check( + *, + initial_agent: "Agent", + agents: Optional[list["Agent"]] = None, + initial_message: Union[str, dict[str, Any]], + logger: Optional[Logger] = None, + uuid: Optional[UUID] = None, + ) -> ChatContextProtocol: + return ChatContext( + initial_agent=initial_agent, agents=agents, initial_message=initial_message, logger=logger, uuid=uuid + ) diff --git a/autogen/agentchat/experimental/initiate_chat.py b/autogen/agentchat/experimental/initiate_chat.py new file mode 100644 index 0000000000..5490d6c997 --- /dev/null +++ 
@runtime_checkable
class ResponseProtocol(Protocol):
    """Structural interface of a handle to a running chat (synchronous side)."""

    @property
    def messages(self) -> Iterable["BaseMessage"]:
        """The messages received by the agent."""
        ...

    # todo: replace request_uuid with InputResponseMessage
    def send(self, request_uuid: UUID, response: str) -> None:
        """Send a response to a request."""
        ...


@runtime_checkable
class AsyncResponseProtocol(Protocol):
    """Structural interface of a handle to a running chat (asynchronous side)."""

    @property
    def messages(self) -> AsyncIterable["BaseMessage"]:
        """The messages received by the agent."""
        ...
@dataclass
class Response:
    """Handle to a chat started with `initiate_chat`.

    Satisfies ResponseProtocol (see the `_type_check` guard below). Field
    annotations are quoted so the dataclass stays importable even if the
    referenced types are only available lazily.
    """

    iostream: "IOStream"  # stream used to exchange messages with the chat
    chat_context: "ChatContext"  # the chat this response belongs to

    @property
    def messages(self) -> Iterable["BaseMessage"]:
        """The messages received by the agent."""
        raise NotImplementedError("This function is not implemented yet.")

    # todo: replace request_uuid with InputResponseMessage
    def send(self, request_uuid: UUID, response: str) -> None:
        """Send a response to a request."""
        raise NotImplementedError("This function is not implemented yet.")

    # static type check that Response implements ResponseProtocol
    if TYPE_CHECKING:

        @staticmethod
        def _type_check(iostream: "IOStream", chat_context: "ChatContext") -> "ResponseProtocol":
            return Response(iostream=iostream, chat_context=chat_context)


class InitiateChatIOStream:
    """IOStream implementation that will bridge a chat running in a worker thread."""

    def print(self, *objects: Any, sep: str = " ", end: str = "\n", flush: bool = False) -> None:
        """Print data to the output stream.

        Args:
            objects (any): The data to print.
            sep (str, optional): The separator between objects. Defaults to " ".
            end (str, optional): The end of the output. Defaults to "\n".
            flush (bool, optional): Whether to flush the output. Defaults to False.
        """
        raise NotImplementedError("This function is not implemented yet.")

    # BUG FIX: the annotation is quoted — BaseMessage is imported only under
    # TYPE_CHECKING, so the unquoted form raised NameError at import time.
    def send(self, message: "BaseMessage") -> None:
        """Send a message to the output stream.

        Args:
            message (Any): The message to send.
        """
        raise NotImplementedError("This function is not implemented yet.")

    def input(self, prompt: str = "", *, password: bool = False) -> str:
        """Read a line from the input stream.

        Args:
            prompt (str, optional): The prompt to display. Defaults to "".
            password (bool, optional): Whether to read a password. Defaults to False.

        Returns:
            str: The line read from the input stream.
        """
        raise NotImplementedError("This function is not implemented yet.")

    # static type check that InitiateChatIOStream implements the IOStream protocol
    if TYPE_CHECKING:

        @staticmethod
        def _type_check(agent: "Agent") -> IOStream:
            return InitiateChatIOStream()


def initiate_chat(
    agent: "Agent",
    *,
    message: Union[str, dict[str, Any]],
    recipient: Optional["Agent"] = None,
) -> "ResponseProtocol":
    """Start a chat driven by `agent` and return a handle to it.

    Args:
        agent: The agent that drives the chat.
        message: The initial message of the chat.
        recipient: Optional agent to chat with.

    Returns:
        A ResponseProtocol handle wrapping the chat's IO stream and context.
    """
    iostream = InitiateChatIOStream()
    chat_context = ChatContext(
        initial_agent=agent,
        agents=[recipient] if recipient else [],
        initial_message=message,
    )
    response = Response(iostream=iostream, chat_context=chat_context)

    # NOTE(review): `with ThreadPoolExecutor()` joins the worker on exit, so
    # this currently blocks until the chat finishes and swallows any exception
    # raised in the worker (the future is never inspected) — TODO: keep the
    # executor alive and surface errors to make this truly asynchronous.
    with ThreadPoolExecutor() as executor:
        # BUG FIX: previously `submit(agent.initiate_chat, agent, ...)` passed
        # the agent both as the bound receiver and as the positional recipient,
        # while also passing recipient= as a keyword (TypeError).
        executor.submit(agent.initiate_chat, message=message, recipient=recipient)

    return response


async def a_initiate_chat(
    agent: "Agent",
    *,
    message: Union[str, dict[str, Any]],
    recipient: Optional["Agent"] = None,
) -> "ResponseProtocol":
    """Async variant of `initiate_chat` (not implemented yet)."""
    raise NotImplementedError("This function is not implemented yet.")
def test_initiate_chat() -> None:
    """`initiate_chat` must drive the agent's own `initiate_chat` and return a handle.

    The previous version passed the string "test" as the agent — the resulting
    AttributeError was raised inside the worker thread, silently swallowed by
    the unchecked future, and the test ended with a meaningless `assert True`.
    """
    calls: list[dict] = []

    class StubAgent:
        # Minimal stand-in for an Agent: records how it is driven.
        def initiate_chat(self, *args, **kwargs):
            calls.append({"args": args, "kwargs": kwargs})

    response = initiate_chat(StubAgent(), message="test")

    # A response handle must be returned and the agent actually invoked.
    assert response is not None
    assert calls, "agent.initiate_chat was never called"
    assert calls[0]["kwargs"].get("message") == "test"
a/autogen/agentchat/experimental/initiate_chat.py b/autogen/agentchat/experimental/initiate_chat.py index 5490d6c997..f372e37a03 100644 --- a/autogen/agentchat/experimental/initiate_chat.py +++ b/autogen/agentchat/experimental/initiate_chat.py @@ -2,14 +2,12 @@ # # SPDX-License-Identifier: Apache-2.0 -import getpass from concurrent.futures import ThreadPoolExecutor from dataclasses import dataclass from typing import TYPE_CHECKING, Any, AsyncIterable, Iterable, Optional, Protocol, Union, runtime_checkable from uuid import UUID from ...io import IOStream -from ...messages.print_message import PrintMessage from .chat_context import ChatContext if TYPE_CHECKING: