# Copyright (c) Microsoft. All rights reserved.

import logging
from collections.abc import AsyncIterable, Callable
from typing import TYPE_CHECKING, Any

from autogen import ConversableAgent

from semantic_kernel.agents.agent import Agent
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.function_call_content import FunctionCallContent
from semantic_kernel.contents.function_result_content import FunctionResultContent
from semantic_kernel.contents.text_content import TextContent
from semantic_kernel.contents.utils.author_role import AuthorRole
from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException
from semantic_kernel.functions.kernel_arguments import KernelArguments

if TYPE_CHECKING:
    from autogen.cache import AbstractCache

    from semantic_kernel.kernel import Kernel

logger: logging.Logger = logging.getLogger(__name__)


class AutoGenConversableAgent(Agent):
    """A Semantic Kernel wrapper around an AutoGen 0.2 `ConversableAgent`.

    This allows one to use it as a Semantic Kernel `Agent`. Note: this agent abstraction
    does not currently allow for the use of AgentGroupChat within Semantic Kernel.
    """

    conversable_agent: ConversableAgent

    def __init__(self, conversable_agent: ConversableAgent, **kwargs: Any) -> None:
        """Initialize the AutoGenConversableAgent.

        Args:
            conversable_agent: The existing AutoGen 0.2 ConversableAgent instance
            kwargs: Other Agent base class arguments (e.g. name, id, instructions)
        """
        args: dict[str, Any] = {
            "name": conversable_agent.name,
            "description": conversable_agent.description,
            "instructions": conversable_agent.system_message,
            "conversable_agent": conversable_agent,
        }

        if kwargs:
            args.update(kwargs)

        super().__init__(**args)

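    # A minimal construction sketch (an assumption, not part of this module): wrap an
    # existing AutoGen 0.2 agent so it can be used as a Semantic Kernel Agent. The
    # `llm_config` values below are illustrative placeholders.
    #
    #   from autogen import ConversableAgent
    #
    #   autogen_agent = ConversableAgent(
    #       name="assistant",
    #       system_message="You are a helpful assistant.",
    #       llm_config={"config_list": [{"model": "gpt-4o", "api_key": "..."}]},
    #   )
    #   sk_agent = AutoGenConversableAgent(conversable_agent=autogen_agent)
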
    async def invoke(
        self,
        *,
        recipient: "AutoGenConversableAgent | None" = None,
        clear_history: bool = True,
        silent: bool = True,
        cache: "AbstractCache | None" = None,
        max_turns: int | None = None,
        summary_method: str | Callable | None = ConversableAgent.DEFAULT_SUMMARY_METHOD,
        summary_args: dict | None = {},
        message: dict | str | Callable | None = None,
        **kwargs: Any,
    ) -> AsyncIterable[ChatMessageContent]:
        """A direct `invoke` method for the ConversableAgent.

        Args:
            recipient: The recipient ConversableAgent to chat with
            clear_history: Whether to clear the chat history before starting. True by default.
            silent: Whether to suppress console output. True by default.
            cache: The cache to use for storing chat history
            max_turns: The maximum number of turns to chat for
            summary_method: The method to use for summarizing the chat
            summary_args: The arguments to pass to the summary method
            message: The initial message to send. If message is not provided,
                the agent will wait for the user to provide the first message.
            kwargs: Additional keyword arguments
        """
        if recipient is not None:
            if not isinstance(recipient, AutoGenConversableAgent):
                raise AgentInvokeException(
                    f"Invalid recipient type: {type(recipient)}. "
                    "Recipient must be an instance of AutoGenConversableAgent."
                )

            chat_result = await self.conversable_agent.a_initiate_chat(
                recipient=recipient.conversable_agent,
                clear_history=clear_history,
                silent=silent,
                cache=cache,
                max_turns=max_turns,
                summary_method=summary_method,
                summary_args=summary_args,
                message=message,  # type: ignore
                **kwargs,
            )

            logger.info(f"Called AutoGenConversableAgent.a_initiate_chat with recipient: {recipient}")

            for chat_message in chat_result.chat_history:
                yield AutoGenConversableAgent._to_chat_message_content(chat_message)  # type: ignore
        else:
            reply = await self.conversable_agent.a_generate_reply(
                messages=[{"role": "user", "content": message}],
            )

            logger.info("Called AutoGenConversableAgent.a_generate_reply.")

            if isinstance(reply, str):
                yield ChatMessageContent(content=reply, role=AuthorRole.ASSISTANT)
            elif isinstance(reply, dict):
                yield ChatMessageContent(**reply)
            else:
                raise AgentInvokeException(f"Unexpected reply type from `a_generate_reply`: {type(reply)}")

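    # A minimal invocation sketch (an assumption, not taken from this module) of the two
    # supported modes of `invoke`. Agent names and message text are illustrative.
    #
    #   # 1. Direct reply from the wrapped agent:
    #   async for response in sk_agent.invoke(message="What is 2 + 2?"):
    #       print(response.content)
    #
    #   # 2. A two-agent chat, yielding the full chat history:
    #   async for response in sk_agent.invoke(
    #       recipient=another_sk_agent, message="Plan a day trip.", max_turns=2
    #   ):
    #       print(f"{response.name}: {response.content}")
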
    async def invoke_stream(
        self,
        message: str,
        kernel: "Kernel | None" = None,
        arguments: KernelArguments | None = None,
        **kwargs: Any,
    ) -> AsyncIterable[ChatMessageContent]:
        """Invoke the agent with a stream of messages."""
        raise NotImplementedError("The AutoGenConversableAgent does not support streaming.")

    @staticmethod
    def _to_chat_message_content(message: dict[str, Any]) -> ChatMessageContent:
        """Translate an AutoGen message to a Semantic Kernel ChatMessageContent."""
        items: list[TextContent | FunctionCallContent | FunctionResultContent] = []
        role = AuthorRole(message.get("role"))
        name: str = message.get("name", "")

        content = message.get("content")
        if content is not None:
            text = TextContent(text=content)
            items.append(text)

        if role == AuthorRole.ASSISTANT:
            # Map any OpenAI-style tool calls on the assistant message to FunctionCallContent.
            tool_calls = message.get("tool_calls")
            if tool_calls is not None:
                for tool_call in tool_calls:
                    items.append(
                        FunctionCallContent(
                            id=tool_call.get("id"),
                            function_name=tool_call.get("function", {}).get("name"),
                            arguments=tool_call.get("function", {}).get("arguments"),
                        )
                    )

        if role == AuthorRole.TOOL:
            # Map tool responses on the tool message to FunctionResultContent.
            tool_responses = message.get("tool_responses")
            if tool_responses is not None:
                for tool_response in tool_responses:
                    items.append(
                        FunctionResultContent(
                            id=tool_response.get("tool_call_id"),
                            result=tool_response.get("content"),
                        )
                    )

        return ChatMessageContent(role=role, items=items, name=name)  # type: ignore
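

# A small illustrative sketch (an assumption, not part of this module) of the AutoGen
# message shapes that `_to_chat_message_content` translates; field values are made up.
#
#   assistant_message = {
#       "role": "assistant",
#       "name": "assistant",
#       "content": None,
#       "tool_calls": [
#           {
#               "id": "call_1",
#               "type": "function",
#               "function": {"name": "get_weather", "arguments": '{"city": "Seattle"}'},
#           }
#       ],
#   }
#   tool_message = {
#       "role": "tool",
#       "content": "72 and sunny",
#       "tool_responses": [{"tool_call_id": "call_1", "role": "tool", "content": "72 and sunny"}],
#   }
#   chat_message = AutoGenConversableAgent._to_chat_message_content(assistant_message)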