From b69245f1b89e1e9997137c0d7099348aa21f8358 Mon Sep 17 00:00:00 2001
From: Chi Wang <4250911+sonichi@users.noreply.github.com>
Date: Mon, 20 Jan 2025 07:06:17 -0800
Subject: [PATCH] doc improvement & repo cleanup (#548)

* remove dotnet

* Cleanup examples section

* wording

* Fix test and update website file

---------

Co-authored-by: Davor Runje
Co-authored-by: Kumaran Rajendhiran
---
 .github/workflows/dotnet-build.yml | 233 ------
 .github/workflows/dotnet-release.yml | 77 --
 README.md | 2 +-
 dotnet/.config/dotnet-tools.json | 18 -
 dotnet/.editorconfig | 179 -----
 dotnet/.gitignore | 30 -
 dotnet/.tools/run_all_notebook.ps1 | 64 --
 dotnet/.tools/test-aot-compatibility.ps1 | 41 -
 dotnet/AutoGen.sln | 257 ------
 dotnet/Directory.Build.props | 51 --
 dotnet/NuGet.config | 8 -
 dotnet/README.md | 103 ---
 dotnet/eng/MetaInfo.props | 12 -
 dotnet/eng/Sign.props | 22 -
 dotnet/eng/Version.props | 21 -
 dotnet/eng/opensource.snk | Bin 596 -> 0 bytes
 dotnet/global.json | 6 -
 dotnet/nuget/NUGET.md | 8 -
 dotnet/nuget/icon.png | 3 -
 dotnet/nuget/nuget-package.props | 54 --
 dotnet/resource/images/background.png | 3 -
 dotnet/resource/images/square.png | 3 -
 .../Anthropic_Agent_With_Prompt_Caching.cs | 139 ----
 .../AutoGen.Anthropic.Samples.csproj | 19 -
 .../Create_Anthropic_Agent.cs | 34 -
 .../Create_Anthropic_Agent_With_Tool.cs | 106 ---
 .../AutoGen.Anthropic.Samples/Program.cs | 18 -
 .../AutoGen.BasicSample.csproj | 19 -
 .../CodeSnippet/AgentCodeSnippet.cs | 37 -
 .../CodeSnippet/BuildInMessageCodeSnippet.cs | 48 --
 .../CodeSnippet/CreateAnAgent.cs | 148 ----
 .../CodeSnippet/FunctionCallCodeSnippet.cs | 155 ----
 .../CodeSnippet/GetStartCodeSnippet.cs | 47 --
 .../CodeSnippet/MiddlewareAgentCodeSnippet.cs | 183 -----
 .../CodeSnippet/MistralAICodeSnippet.cs | 92 ---
 .../CodeSnippet/OpenAICodeSnippet.cs | 142 ----
 .../PrintMessageMiddlewareCodeSnippet.cs | 50 --
 .../CodeSnippet/RunCodeSnippetCodeSnippet.cs | 86 ---
 .../CodeSnippet/SemanticKernelCodeSnippet.cs | 107 ---
 .../TypeSafeFunctionCallCodeSnippet.cs | 127 ---
 .../CodeSnippet/UserProxyAgentCodeSnippet.cs | 26 -
 .../Example01_AssistantAgent.cs | 52 --
 .../Example02_TwoAgent_MathChat.cs | 86 ---
 .../Example03_Agent_FunctionCall.cs | 112 ---
 ...Example04_Dynamic_GroupChat_Coding_Task.cs | 270 -------
 .../Example05_Dalle_And_GPT4V.cs | 140 ----
 .../Example06_UserProxyAgent.cs | 38 -
 ...7_Dynamic_GroupChat_Calculate_Fibonacci.cs | 389 ----------
 .../Example08_LMStudio.cs | 50 --
 .../Example09_LMStudio_FunctionCall.cs | 143 ----
 .../Example10_SemanticKernel.cs | 86 ---
 .../Example11_Sequential_GroupChat_Example.cs | 100 ---
 .../Example12_TwoAgent_Fill_Application.cs | 194 -----
 .../Example13_OpenAIAgent_JsonMode.cs | 11 -
 ...Example14_MistralClientAgent_TokenCount.cs | 71 --
 .../Example15_GPT4V_BinaryDataImageMessage.cs | 71 --
 ...nAIChatAgent_ConnectToThirdPartyBackend.cs | 10 -
 .../Example17_ReActAgent.cs | 193 -----
 .../GettingStart/Agent_Middleware.cs | 86 ---
 .../GettingStart/Chat_With_Agent.cs | 65 --
 .../GettingStart/Dynamic_Group_Chat.cs | 97 ---
 .../GettingStart/FSM_Group_Chat.cs | 196 -----
 .../GettingStart/Image_Chat_With_Agent.cs | 59 --
 .../GettingStart/Streaming_Tool_Call.cs | 62 --
 .../GettingStart/Use_Tools_With_Agent.cs | 113 ---
 .../AutoGen.BasicSamples/GlobalUsing.cs | 9 -
 .../AutoGen.BasicSamples/LLMConfiguration.cs | 46 --
 dotnet/sample/AutoGen.BasicSamples/Program.cs | 65 --
 .../AutoGen.Gemini.Sample.csproj | 19 -
 .../Chat_With_Google_Gemini.cs | 47 --
 .../Chat_With_Vertex_Gemini.cs | 48 --
 .../Function_Call_With_Gemini.cs | 137 ----
.../Image_Chat_With_Vertex_Gemini.cs | 51 -- .../sample/AutoGen.Gemini.Sample/Program.cs | 12 - .../AutoGen.Ollama.Sample.csproj | 19 - .../AutoGen.Ollama.Sample/Chat_With_LLaMA.cs | 38 - .../AutoGen.Ollama.Sample/Chat_With_LLaVA.cs | 54 -- .../sample/AutoGen.Ollama.Sample/Program.cs | 12 - .../AutoGen.OpenAI.V1.Sample.csproj | 21 - .../Connect_To_Ollama.cs | 69 -- .../sample/AutoGen.OpenAI.Sample/Program.cs | 12 - .../Tool_Call_With_Ollama_And_LiteLLM.cs | 75 -- .../AutoGen.OpenAI.Sample/Use_Json_Mode.cs | 73 -- .../AutoGen.SemanticKernel.Sample.csproj | 17 - .../Create_Semantic_Kernel_Agent.cs | 35 - .../Create_Semantic_Kernel_Chat_Agent.cs | 50 -- .../AutoGen.SemanticKernel.Sample/Program.cs | 12 - ..._Bing_Search_With_Semantic_Kernel_Agent.cs | 43 -- .../Use_Kernel_Functions_With_Other_Agent.cs | 58 -- .../AutoGen.WebAPI.Sample.csproj | 13 - .../sample/AutoGen.WebAPI.Sample/Program.cs | 51 -- .../Agent/AnthropicClientAgent.cs | 126 --- .../src/AutoGen.Anthropic/AnthropicClient.cs | 208 ----- .../AutoGen.Anthropic.csproj | 22 - .../Converters/ContentBaseConverter.cs | 45 -- .../JsonPropertyNameEnumCoverter.cs | 50 -- .../Converters/SystemMessageConverter.cs | 48 -- .../DTO/ChatCompletionRequest.cs | 99 --- .../DTO/ChatCompletionResponse.cs | 103 --- dotnet/src/AutoGen.Anthropic/DTO/Content.cs | 101 --- .../AutoGen.Anthropic/DTO/ErrorResponse.cs | 27 - dotnet/src/AutoGen.Anthropic/DTO/Tool.cs | 49 -- .../src/AutoGen.Anthropic/DTO/ToolChoice.cs | 45 -- .../Extensions/AnthropicAgentExtension.cs | 40 - .../Middleware/AnthropicMessageConnector.cs | 291 ------- .../Utils/AnthropicConstants.cs | 21 - .../Agent/ChatCompletionsClientAgent.cs | 208 ----- .../AutoGen.AzureAIInference.csproj | 25 - .../ChatComptionClientAgentExtension.cs | 45 -- .../Extension/FunctionContractExtension.cs | 70 -- ...eAIInferenceChatRequestMessageConnector.cs | 308 -------- .../AutoGen.Core/Agent/DefaultReplyAgent.cs | 37 - .../AutoGen.Core/Agent/GroupChatManager.cs | 40 - dotnet/src/AutoGen.Core/Agent/IAgent.cs | 60 -- .../AutoGen.Core/Agent/IMiddlewareAgent.cs | 60 -- .../src/AutoGen.Core/Agent/IStreamingAgent.cs | 24 - .../src/AutoGen.Core/Agent/MiddlewareAgent.cs | 146 ---- .../Agent/MiddlewareStreamingAgent.cs | 124 --- dotnet/src/AutoGen.Core/AutoGen.Core.csproj | 26 - .../AutoGen.Core/Extension/AgentExtension.cs | 190 ----- .../Extension/GroupChatExtension.cs | 157 ---- .../Extension/MessageExtension.cs | 229 ------ .../Extension/MiddlewareExtension.cs | 151 ---- .../PrintMessageMiddlewareExtension.cs | 75 -- .../Extension/StreamingMiddlewareExtension.cs | 43 -- .../Function/FunctionAttribute.cs | 99 --- dotnet/src/AutoGen.Core/GroupChat/Graph.cs | 136 ---- .../src/AutoGen.Core/GroupChat/GroupChat.cs | 219 ------ .../src/AutoGen.Core/GroupChat/IGroupChat.cs | 28 - .../GroupChat/RoundRobinGroupChat.cs | 39 - dotnet/src/AutoGen.Core/ILLMConfig.cs | 14 - .../AutoGen.Core/Message/AggregateMessage.cs | 59 -- dotnet/src/AutoGen.Core/Message/IMessage.cs | 82 -- .../src/AutoGen.Core/Message/ImageMessage.cs | 88 --- dotnet/src/AutoGen.Core/Message/Message.cs | 61 -- .../AutoGen.Core/Message/MessageEnvelope.cs | 43 -- .../AutoGen.Core/Message/MultiModalMessage.cs | 64 -- dotnet/src/AutoGen.Core/Message/Role.cs | 60 -- .../src/AutoGen.Core/Message/TextMessage.cs | 79 -- .../Message/ToolCallAggregateMessage.cs | 34 - .../AutoGen.Core/Message/ToolCallMessage.cs | 132 ---- .../Message/ToolCallResultMessage.cs | 59 -- .../Middleware/DelegateMiddleware.cs | 51 -- .../Middleware/FunctionCallMiddleware.cs | 182 ----- 
.../AutoGen.Core/Middleware/IMiddleware.cs | 32 - .../Middleware/IStreamingMiddleware.cs | 27 - .../Middleware/MiddlewareContext.cs | 33 - .../Middleware/PrintMessageMiddleware.cs | 124 --- .../Orchestrator/IOrchestrator.cs | 34 - .../Orchestrator/RolePlayOrchestrator.cs | 122 --- .../Orchestrator/RoundRobinOrchestrator.cs | 50 -- .../Orchestrator/WorkflowOrchestrator.cs | 59 -- .../AutoGen.DotnetInteractive.csproj | 40 - .../DotnetInteractiveFunction.cs | 186 ----- .../DotnetInteractiveKernelBuilder.cs | 34 - .../DotnetInteractiveStdioKernelConnector.cs | 92 --- .../Extension/AgentExtension.cs | 90 --- .../Extension/KernelExtension.cs | 87 --- .../Extension/MessageExtension.cs | 59 -- .../AutoGen.DotnetInteractive/GlobalUsing.cs | 10 - ...nProccessDotnetInteractiveKernelBuilder.cs | 116 --- .../InteractiveService.cs | 241 ------ .../RestoreInteractive.config | 9 - .../dotnet-tools.json | 12 - .../src/AutoGen.Gemini/AutoGen.Gemini.csproj | 27 - .../Extension/FunctionContractExtension.cs | 96 --- dotnet/src/AutoGen.Gemini/GeminiChatAgent.cs | 274 ------- .../src/AutoGen.Gemini/GoogleGeminiClient.cs | 89 --- dotnet/src/AutoGen.Gemini/IGeminiClient.cs | 21 - .../Middleware/GeminiAgentExtension.cs | 46 -- .../Middleware/GeminiMessageConnector.cs | 489 ------------ .../src/AutoGen.Gemini/VertexGeminiClient.cs | 44 -- .../AutoGen.LMStudio/AutoGen.LMStudio.csproj | 23 - dotnet/src/AutoGen.LMStudio/GlobalUsing.cs | 10 - dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs | 94 --- dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs | 36 - dotnet/src/AutoGen.LMStudio/README.md | 31 - .../Agent/MistralClientAgent.cs | 136 ---- .../AutoGen.Mistral/AutoGen.Mistral.csproj | 23 - .../JsonPropertyNameEnumConverter.cs | 49 -- .../DTOs/ChatCompletionRequest.cs | 125 --- .../DTOs/ChatCompletionResponse.cs | 56 -- .../src/AutoGen.Mistral/DTOs/ChatMessage.cs | 109 --- dotnet/src/AutoGen.Mistral/DTOs/Choice.cs | 64 -- dotnet/src/AutoGen.Mistral/DTOs/Error.cs | 45 -- .../src/AutoGen.Mistral/DTOs/ErrorResponse.cs | 25 - .../DTOs/FunctionDefinition.cs | 32 - dotnet/src/AutoGen.Mistral/DTOs/Model.cs | 70 -- .../AutoGen.Mistral/DTOs/ResponseFormat.cs | 18 - dotnet/src/AutoGen.Mistral/DTOs/Tool.cs | 57 -- dotnet/src/AutoGen.Mistral/DTOs/Usage.cs | 32 - .../Extension/FunctionContractExtension.cs | 65 -- .../Extension/MistralAgentExtension.cs | 43 -- .../Middleware/MistralChatMessageConnector.cs | 328 -------- .../src/AutoGen.Mistral/MistralAIModelID.cs | 20 - dotnet/src/AutoGen.Mistral/MistralClient.cs | 174 ----- .../src/AutoGen.Ollama/Agent/OllamaAgent.cs | 191 ----- .../src/AutoGen.Ollama/AutoGen.Ollama.csproj | 23 - dotnet/src/AutoGen.Ollama/DTOs/ChatRequest.cs | 59 -- .../src/AutoGen.Ollama/DTOs/ChatResponse.cs | 51 -- .../AutoGen.Ollama/DTOs/ChatResponseUpdate.cs | 27 - dotnet/src/AutoGen.Ollama/DTOs/Message.cs | 43 -- .../AutoGen.Ollama/DTOs/ModelReplyOptions.cs | 135 ---- .../AutoGen.Ollama/DTOs/OllamaReplyOptions.cs | 117 --- .../Embeddings/ITextEmbeddingService.cs | 18 - .../Embeddings/OllamaTextEmbeddingService.cs | 50 -- .../Embeddings/TextEmbeddingsRequest.cs | 38 - .../Embeddings/TextEmbeddingsResponse.cs | 18 - .../Extension/OllamaAgentExtension.cs | 45 -- .../Middlewares/OllamaMessageConnector.cs | 192 ----- dotnet/src/AutoGen.Ollama/OllamaConsts.cs | 18 - .../src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs | 120 --- .../Agent/OpenAIChatAgent.cs | 212 ----- .../AutoGen.OpenAI.V1.csproj | 25 - .../AutoGen.OpenAI.V1/AzureOpenAIConfig.cs | 29 - .../Extension/FunctionContractExtension.cs | 69 -- 
.../Extension/MessageExtension.cs | 237 ------ .../Extension/OpenAIAgentExtension.cs | 43 -- dotnet/src/AutoGen.OpenAI.V1/GlobalUsing.cs | 10 - .../OpenAIChatRequestMessageConnector.cs | 393 ---------- dotnet/src/AutoGen.OpenAI.V1/OpenAIConfig.cs | 23 - .../AutoGen.SemanticKernel.csproj | 28 - .../Extension/KernelExtension.cs | 54 -- .../Extension/SemanticKernelAgentExtension.cs | 43 -- .../src/AutoGen.SemanticKernel/GlobalUsing.cs | 10 - .../Middleware/KernelPluginMiddleware.cs | 83 -- ...manticKernelChatMessageContentConnector.cs | 262 ------- .../SemanticKernelAgent.cs | 127 --- .../SemanticKernelChatCompletionAgent.cs | 57 -- .../AutoGen.SourceGenerator.csproj | 64 -- .../DocumentCommentExtension.cs | 301 -------- .../FunctionCallGenerator.cs | 254 ------ .../FunctionExtension.cs | 38 - dotnet/src/AutoGen.SourceGenerator/README.md | 113 --- .../SourceGeneratorFunctionContract.cs | 46 -- .../Template/FunctionCallTemplate.cs | 442 ----------- .../Template/FunctionCallTemplate.tt | 109 --- .../src/AutoGen.WebAPI/AutoGen.WebAPI.csproj | 27 - dotnet/src/AutoGen.WebAPI/Extension.cs | 30 - .../Converter/OpenAIMessageConverter.cs | 62 -- .../OpenAI/DTO/OpenAIAssistantMessage.cs | 27 - .../OpenAI/DTO/OpenAIChatCompletion.cs | 36 - .../OpenAI/DTO/OpenAIChatCompletionChoice.cs | 27 - .../OpenAI/DTO/OpenAIChatCompletionMessage.cs | 21 - .../OpenAI/DTO/OpenAIChatCompletionOption.cs | 39 - .../OpenAI/DTO/OpenAIChatCompletionUsage.cs | 24 - .../OpenAI/DTO/OpenAIImageUrlObject.cs | 21 - .../OpenAI/DTO/OpenAIMessage.cs | 19 - .../OpenAI/DTO/OpenAIStreamOptions.cs | 18 - .../OpenAI/DTO/OpenAISystemMessage.cs | 24 - .../OpenAI/DTO/OpenAIToolCallObject.cs | 21 - .../OpenAI/DTO/OpenAIToolMessage.cs | 24 - .../OpenAI/DTO/OpenAIUserImageContent.cs | 21 - .../OpenAI/DTO/OpenAIUserMessage.cs | 24 - .../OpenAI/DTO/OpenAIUserMessageItem.cs | 18 - .../OpenAI/DTO/OpenAIUserMultiModalMessage.cs | 24 - .../OpenAI/DTO/OpenAIUserTextContent.cs | 21 - .../Service/OpenAIChatCompletionService.cs | 163 ---- .../OpenAIChatCompletionMiddleware.cs | 98 --- dotnet/src/AutoGen/API/LLMConfigAPI.cs | 56 -- dotnet/src/AutoGen/Agent/AssistantAgent.cs | 36 - dotnet/src/AutoGen/Agent/ConversableAgent.cs | 187 ----- dotnet/src/AutoGen/Agent/UserProxyAgent.cs | 36 - dotnet/src/AutoGen/AutoGen.csproj | 37 - dotnet/src/AutoGen/ConversableAgentConfig.cs | 23 - dotnet/src/AutoGen/GlobalUsing.cs | 10 - .../Middleware/HumanInputMiddleware.cs | 107 --- dotnet/test/.editorconfig | 7 - .../AnthropicClientAgentTest.cs | 235 ------ .../AnthropicClientTest.cs | 248 ------ .../AnthropicTestFunctionCalls.cs | 46 -- .../AnthropicTestUtils.cs | 150 ---- .../AutoGen.Anthropic.Tests.csproj | 23 - .../images/.gitattributes | 1 - .../AutoGen.Anthropic.Tests/images/square.png | 3 - .../AutoGen.AotCompatibility.Tests.csproj | 24 - .../AutoGen.AotCompatibility.Tests/Program.cs | 10 - .../AutoGen.AzureAIInference.Tests.csproj | 16 - .../ChatCompletionClientAgentTests.cs | 539 ------------- .../ChatRequestMessageTests.cs | 574 -------------- .../AutoGen.DotnetInteractive.Tests.csproj | 21 - .../DotnetInteractiveServiceTest.cs | 89 --- ...netInteractiveStdioKernelConnectorTests.cs | 91 --- ...ocessDotnetInteractiveKernelBuilderTest.cs | 85 -- .../MessageExtensionTests.cs | 90 --- ....ItGenerateGetWeatherToolTest.approved.txt | 17 - .../AutoGen.Gemini.Tests.csproj | 19 - .../FunctionContractExtensionTests.cs | 33 - dotnet/test/AutoGen.Gemini.Tests/Functions.cs | 34 - .../AutoGen.Gemini.Tests/GeminiAgentTests.cs | 316 -------- .../GeminiMessageTests.cs | 385 
--------- .../GoogleGeminiClientTests.cs | 138 ---- .../test/AutoGen.Gemini.Tests/SampleTests.cs | 34 - .../VertexGeminiClientTests.cs | 140 ---- .../AutoGen.Mistral.Tests.csproj | 18 - .../MistralClientAgentTests.cs | 247 ------ .../MistralClientTests.cs | 293 ------- .../AutoGen.Ollama.Tests.csproj | 25 - .../AutoGen.Ollama.Tests/OllamaAgentTests.cs | 230 ------ .../OllamaMessageTests.cs | 182 ----- .../OllamaTextEmbeddingServiceTests.cs | 33 - .../AutoGen.Ollama.Tests/images/image.png | 3 - .../AutoGen.Ollama.Tests/images/square.png | 3 - ...MessageTests.BasicMessageTest.approved.txt | 174 ----- .../AutoGen.OpenAI.V1.Tests.csproj | 25 - .../AutoGen.OpenAI.V1.Tests/GlobalUsing.cs | 10 - .../AutoGen.OpenAI.V1.Tests/MathClassTest.cs | 228 ------ .../OpenAIChatAgentTest.cs | 285 ------- .../OpenAIMessageTests.cs | 730 ------------------ ...teFunctionContractsFromMethod.approved.txt | 23 - ...teFunctionContractsFromPrompt.approved.txt | 8 - ...nctionContractsFromTestPlugin.approved.txt | 25 - .../AutoGen.SemanticKernel.Tests.csproj | 19 - .../KernelFunctionExtensionTests.cs | 110 --- .../KernelFunctionMiddlewareTests.cs | 129 ---- .../SemanticKernelAgentTest.cs | 248 ------ ...ests.TestFunctionCallTemplate.approved.txt | 65 -- .../FunctionExample.Add_Test.approved.txt | 21 - ...ample.DictionaryToString_Test.approved.txt | 19 - .../FunctionExample.Query_Test.approved.txt | 24 - .../FunctionExample.Sum_Test.approved.txt | 19 - .../AutoGen.SourceGenerator.Tests.csproj | 16 - .../FilescopeNamespaceFunctionExample.cs | 20 - .../FunctionCallTemplateEncodingTests.cs | 100 --- .../FunctionCallTemplateTests.cs | 52 -- .../FunctionExample.test.cs | 137 ---- .../FunctionExamples.cs | 76 -- .../TopLevelStatementFunctionExample.cs | 19 - .../EnvironmentSpecificFactAttribute.cs | 37 - .../Attribute/OpenAIFact.cs | 28 - .../AutoGen.Tests.Share.csproj | 15 - dotnet/test/AutoGen.Test.Share/EchoAgent.cs | 43 -- .../AutoGen.Tests/ApprovalTests/square.png | 3 - .../test/AutoGen.Tests/AutoGen.Tests.csproj | 24 - dotnet/test/AutoGen.Tests/BasicSampleTest.cs | 86 --- dotnet/test/AutoGen.Tests/GlobalUsing.cs | 10 - .../AutoGen.Tests/GroupChat/GraphTests.cs | 26 - .../AutoGen.Tests/GroupChat/GroupChatTests.cs | 119 --- .../test/AutoGen.Tests/ImageMessageTests.cs | 44 -- .../test/AutoGen.Tests/MiddlewareAgentTest.cs | 111 --- dotnet/test/AutoGen.Tests/MiddlewareTest.cs | 132 ---- .../Orchestrator/RolePlayOrchestratorTests.cs | 387 ---------- .../RoundRobinOrchestratorTests.cs | 109 --- .../Orchestrator/WorkflowOrchestratorTests.cs | 118 --- dotnet/test/AutoGen.Tests/SingleAgentTest.cs | 378 --------- dotnet/test/AutoGen.Tests/TwoAgentTest.cs | 112 --- dotnet/test/AutoGen.Tests/WorkflowTest.cs | 76 -- .../AutoGen.WebAPI.Tests.csproj | 28 - dotnet/test/AutoGen.WebAPI.Tests/EchoAgent.cs | 51 -- .../OpenAIChatCompletionMiddlewareTests.cs | 94 --- dotnet/website/.gitignore | 12 - dotnet/website/README.md | 13 - dotnet/website/articles/Agent-overview.md | 43 -- .../articles/AutoGen-Mistral-Overview.md | 26 - .../articles/AutoGen-OpenAI-Overview.md | 17 - .../AutoGen.Gemini/Chat-with-google-gemini.md | 31 - .../AutoGen.Gemini/Chat-with-vertex-gemini.md | 32 - .../Function-call-with-gemini.md | 38 - .../AutoGen.Gemini/Image-chat-with-gemini.md | 25 - .../articles/AutoGen.Gemini/Overview.md | 12 - .../AutoGen.Ollama/Chat-with-llama.md | 27 - .../AutoGen.Ollama/Chat-with-llava.md | 29 - .../AutoGen-SemanticKernel-Overview.md | 19 - .../SemanticKernelAgent-simple-chat.md | 9 - ...manticKernelAgent-support-more-messages.md | 10 
- .../SemanticKernelChatAgent-simple-chat.md | 22 - .../Use-kernel-plugin-in-other-agents.md | 27 - dotnet/website/articles/Built-in-messages.md | 37 - .../Consume-LLM-server-from-LM-Studio.md | 20 - .../articles/Create-a-user-proxy-agent.md | 16 - dotnet/website/articles/Create-an-agent.md | 11 - .../Create-type-safe-function-call.md | 41 - .../website/articles/Create-your-own-agent.md | 1 - .../articles/Create-your-own-middleware.md | 1 - .../articles/Function-call-middleware.md | 1 - .../articles/Function-call-overview.md | 52 -- .../Function-call-with-ollama-and-litellm.md | 93 --- .../website/articles/Group-chat-overview.md | 8 - dotnet/website/articles/Group-chat.md | 73 -- dotnet/website/articles/Installation.md | 67 -- .../website/articles/Middleware-overview.md | 27 - .../MistralChatAgent-count-token-usage.md | 28 - .../MistralChatAgent-use-function-call.md | 41 - ...nAIChatAgent-connect-to-third-party-api.md | 49 -- .../articles/OpenAIChatAgent-simple-chat.md | 11 - .../OpenAIChatAgent-support-more-messages.md | 6 - .../OpenAIChatAgent-use-function-call.md | 33 - .../articles/OpenAIChatAgent-use-json-mode.md | 30 - .../articles/Print-message-middleware.md | 27 - dotnet/website/articles/Roundrobin-chat.md | 33 - dotnet/website/articles/Run-dotnet-code.md | 61 -- dotnet/website/articles/Two-agent-chat.md | 19 - dotnet/website/articles/Use-function-call.md | 43 -- .../articles/Use-graph-in-group-chat.md | 25 - ...-between-python-AutoGen-and-autogen.net.md | 37 - dotnet/website/articles/getting-start.md | 26 - dotnet/website/articles/toc.yml | 126 --- dotnet/website/docfx.json | 72 -- dotnet/website/filterConfig.yml | 3 - dotnet/website/images/ag.ico | Bin 3126 -> 0 bytes dotnet/website/images/ag.svg | 9 - .../articles/ConnectTo3PartyOpenAI/output.gif | Bin 121452 -> 0 bytes ...single-turn-tool-call-with-auto-invoke.png | 3 - ...gle-turn-tool-call-without-auto-invoke.png | 3 - .../articles/CreateUserProxyAgent/image-1.png | 3 - .../articles/DynamicGroupChat/dynamicChat.gif | 3 - .../PrintMessageMiddleware/printMessage.png | 3 - .../streamingoutput.gif | 3 - .../SearcherSummarizer.gif | 3 - .../FinalStepsA.png | 3 - .../FinalStepsB.png | 3 - .../FinalStepsC.png | 3 - .../Step5.2OpenAIModel.png | 3 - .../Step5.3ModelNameAndURL.png | 3 - .../UseAutoGenAsModelinAGStudio/Step6.png | 3 - .../UseAutoGenAsModelinAGStudio/Step6b.png | 3 - .../UseAutoGenAsModelinAGStudio/Terminal.png | 3 - .../TheModelTab.png | 3 - dotnet/website/index.md | 1 - dotnet/website/release_note/0.0.16.md | 32 - dotnet/website/release_note/0.0.17.md | 45 -- dotnet/website/release_note/0.1.0.md | 41 - dotnet/website/release_note/toc.yml | 11 - dotnet/website/release_note/update.md | 77 -- dotnet/website/template/public/main.js | 9 - dotnet/website/toc.yml | 20 - dotnet/website/tutorial/Chat-with-an-agent.md | 53 -- .../tutorial/Create-agent-with-tools.md | 105 --- .../website/tutorial/Image-chat-with-agent.md | 50 -- ...AutoGen.Net-agent-as-model-in-AG-Studio.md | 84 -- dotnet/website/tutorial/toc.yml | 11 - test/website/test_process_notebooks.py | 1 - website/docs/Examples.mdx | 134 ---- website/docs/FAQ.mdx | 41 +- website/docs/Gallery.mdx | 16 +- website/process_notebooks.py | 4 +- website/snippets/data/NotebooksMetadata.mdx | 4 +- 437 files changed, 35 insertions(+), 32372 deletions(-) delete mode 100644 .github/workflows/dotnet-build.yml delete mode 100644 .github/workflows/dotnet-release.yml delete mode 100644 dotnet/.config/dotnet-tools.json delete mode 100644 dotnet/.editorconfig delete mode 100644 
dotnet/.gitignore delete mode 100644 dotnet/.tools/run_all_notebook.ps1 delete mode 100644 dotnet/.tools/test-aot-compatibility.ps1 delete mode 100644 dotnet/AutoGen.sln delete mode 100644 dotnet/Directory.Build.props delete mode 100644 dotnet/NuGet.config delete mode 100644 dotnet/README.md delete mode 100644 dotnet/eng/MetaInfo.props delete mode 100644 dotnet/eng/Sign.props delete mode 100644 dotnet/eng/Version.props delete mode 100644 dotnet/eng/opensource.snk delete mode 100644 dotnet/global.json delete mode 100644 dotnet/nuget/NUGET.md delete mode 100644 dotnet/nuget/icon.png delete mode 100644 dotnet/nuget/nuget-package.props delete mode 100644 dotnet/resource/images/background.png delete mode 100644 dotnet/resource/images/square.png delete mode 100644 dotnet/sample/AutoGen.Anthropic.Samples/Anthropic_Agent_With_Prompt_Caching.cs delete mode 100644 dotnet/sample/AutoGen.Anthropic.Samples/AutoGen.Anthropic.Samples.csproj delete mode 100644 dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent.cs delete mode 100644 dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent_With_Tool.cs delete mode 100644 dotnet/sample/AutoGen.Anthropic.Samples/Program.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/CodeSnippet/UserProxyAgentCodeSnippet.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs delete mode 100644 
dotnet/sample/AutoGen.BasicSamples/Example13_OpenAIAgent_JsonMode.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/GlobalUsing.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs delete mode 100644 dotnet/sample/AutoGen.BasicSamples/Program.cs delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/AutoGen.Gemini.Sample.csproj delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/Function_Call_With_Gemini.cs delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs delete mode 100644 dotnet/sample/AutoGen.Gemini.Sample/Program.cs delete mode 100644 dotnet/sample/AutoGen.Ollama.Sample/AutoGen.Ollama.Sample.csproj delete mode 100644 dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs delete mode 100644 dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs delete mode 100644 dotnet/sample/AutoGen.Ollama.Sample/Program.cs delete mode 100644 dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj delete mode 100644 dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs delete mode 100644 dotnet/sample/AutoGen.OpenAI.Sample/Program.cs delete mode 100644 dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs delete mode 100644 dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Agent.cs delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/Program.cs delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Bing_Search_With_Semantic_Kernel_Agent.cs delete mode 100644 dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs delete mode 100644 dotnet/sample/AutoGen.WebAPI.Sample/AutoGen.WebAPI.Sample.csproj delete mode 100644 dotnet/sample/AutoGen.WebAPI.Sample/Program.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/Agent/AnthropicClientAgent.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/AnthropicClient.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/AutoGen.Anthropic.csproj delete mode 100644 dotnet/src/AutoGen.Anthropic/Converters/ContentBaseConverter.cs delete mode 100644 
dotnet/src/AutoGen.Anthropic/Converters/JsonPropertyNameEnumCoverter.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/Converters/SystemMessageConverter.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionRequest.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionResponse.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/Content.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/ErrorResponse.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/Tool.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/DTO/ToolChoice.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/Extensions/AnthropicAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/Middleware/AnthropicMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.Anthropic/Utils/AnthropicConstants.cs delete mode 100644 dotnet/src/AutoGen.AzureAIInference/Agent/ChatCompletionsClientAgent.cs delete mode 100644 dotnet/src/AutoGen.AzureAIInference/AutoGen.AzureAIInference.csproj delete mode 100644 dotnet/src/AutoGen.AzureAIInference/Extension/ChatComptionClientAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.AzureAIInference/Extension/FunctionContractExtension.cs delete mode 100644 dotnet/src/AutoGen.AzureAIInference/Middleware/AzureAIInferenceChatRequestMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/DefaultReplyAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/GroupChatManager.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/IAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/IMiddlewareAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/IStreamingAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/MiddlewareAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/Agent/MiddlewareStreamingAgent.cs delete mode 100644 dotnet/src/AutoGen.Core/AutoGen.Core.csproj delete mode 100644 dotnet/src/AutoGen.Core/Extension/AgentExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Extension/GroupChatExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Extension/MessageExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Extension/MiddlewareExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Extension/PrintMessageMiddlewareExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Extension/StreamingMiddlewareExtension.cs delete mode 100644 dotnet/src/AutoGen.Core/Function/FunctionAttribute.cs delete mode 100644 dotnet/src/AutoGen.Core/GroupChat/Graph.cs delete mode 100644 dotnet/src/AutoGen.Core/GroupChat/GroupChat.cs delete mode 100644 dotnet/src/AutoGen.Core/GroupChat/IGroupChat.cs delete mode 100644 dotnet/src/AutoGen.Core/GroupChat/RoundRobinGroupChat.cs delete mode 100644 dotnet/src/AutoGen.Core/ILLMConfig.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/AggregateMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/IMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/ImageMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/Message.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/MessageEnvelope.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/MultiModalMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/Role.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/TextMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/ToolCallAggregateMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/ToolCallMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Message/ToolCallResultMessage.cs delete mode 100644 dotnet/src/AutoGen.Core/Middleware/DelegateMiddleware.cs 
delete mode 100644 dotnet/src/AutoGen.Core/Middleware/FunctionCallMiddleware.cs delete mode 100644 dotnet/src/AutoGen.Core/Middleware/IMiddleware.cs delete mode 100644 dotnet/src/AutoGen.Core/Middleware/IStreamingMiddleware.cs delete mode 100644 dotnet/src/AutoGen.Core/Middleware/MiddlewareContext.cs delete mode 100644 dotnet/src/AutoGen.Core/Middleware/PrintMessageMiddleware.cs delete mode 100644 dotnet/src/AutoGen.Core/Orchestrator/IOrchestrator.cs delete mode 100644 dotnet/src/AutoGen.Core/Orchestrator/RolePlayOrchestrator.cs delete mode 100644 dotnet/src/AutoGen.Core/Orchestrator/RoundRobinOrchestrator.cs delete mode 100644 dotnet/src/AutoGen.Core/Orchestrator/WorkflowOrchestrator.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/AutoGen.DotnetInteractive.csproj delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveFunction.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveKernelBuilder.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveStdioKernelConnector.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/Extension/AgentExtension.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/Extension/KernelExtension.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/Extension/MessageExtension.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/GlobalUsing.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/InProccessDotnetInteractiveKernelBuilder.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/InteractiveService.cs delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/RestoreInteractive.config delete mode 100644 dotnet/src/AutoGen.DotnetInteractive/dotnet-tools.json delete mode 100644 dotnet/src/AutoGen.Gemini/AutoGen.Gemini.csproj delete mode 100644 dotnet/src/AutoGen.Gemini/Extension/FunctionContractExtension.cs delete mode 100644 dotnet/src/AutoGen.Gemini/GeminiChatAgent.cs delete mode 100644 dotnet/src/AutoGen.Gemini/GoogleGeminiClient.cs delete mode 100644 dotnet/src/AutoGen.Gemini/IGeminiClient.cs delete mode 100644 dotnet/src/AutoGen.Gemini/Middleware/GeminiAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.Gemini/Middleware/GeminiMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.Gemini/VertexGeminiClient.cs delete mode 100644 dotnet/src/AutoGen.LMStudio/AutoGen.LMStudio.csproj delete mode 100644 dotnet/src/AutoGen.LMStudio/GlobalUsing.cs delete mode 100644 dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs delete mode 100644 dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs delete mode 100644 dotnet/src/AutoGen.LMStudio/README.md delete mode 100644 dotnet/src/AutoGen.Mistral/Agent/MistralClientAgent.cs delete mode 100644 dotnet/src/AutoGen.Mistral/AutoGen.Mistral.csproj delete mode 100644 dotnet/src/AutoGen.Mistral/Converters/JsonPropertyNameEnumConverter.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionRequest.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionResponse.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/ChatMessage.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/Choice.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/Error.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/ErrorResponse.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/FunctionDefinition.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/Model.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/ResponseFormat.cs delete mode 100644 dotnet/src/AutoGen.Mistral/DTOs/Tool.cs delete mode 100644 
dotnet/src/AutoGen.Mistral/DTOs/Usage.cs delete mode 100644 dotnet/src/AutoGen.Mistral/Extension/FunctionContractExtension.cs delete mode 100644 dotnet/src/AutoGen.Mistral/Extension/MistralAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.Mistral/Middleware/MistralChatMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.Mistral/MistralAIModelID.cs delete mode 100644 dotnet/src/AutoGen.Mistral/MistralClient.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Agent/OllamaAgent.cs delete mode 100644 dotnet/src/AutoGen.Ollama/AutoGen.Ollama.csproj delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/ChatRequest.cs delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/ChatResponse.cs delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/ChatResponseUpdate.cs delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/Message.cs delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/ModelReplyOptions.cs delete mode 100644 dotnet/src/AutoGen.Ollama/DTOs/OllamaReplyOptions.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Embeddings/ITextEmbeddingService.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Embeddings/OllamaTextEmbeddingService.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsRequest.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsResponse.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Extension/OllamaAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.Ollama/Middlewares/OllamaMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.Ollama/OllamaConsts.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Agent/OpenAIChatAgent.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/AzureOpenAIConfig.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Extension/FunctionContractExtension.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Extension/MessageExtension.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Extension/OpenAIAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/GlobalUsing.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/Middleware/OpenAIChatRequestMessageConnector.cs delete mode 100644 dotnet/src/AutoGen.OpenAI.V1/OpenAIConfig.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj delete mode 100644 dotnet/src/AutoGen.SemanticKernel/Extension/KernelExtension.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/Extension/SemanticKernelAgentExtension.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/GlobalUsing.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/Middleware/KernelPluginMiddleware.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/Middleware/SemanticKernelChatMessageContentConnector.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs delete mode 100644 dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs delete mode 100644 dotnet/src/AutoGen.SourceGenerator/AutoGen.SourceGenerator.csproj delete mode 100644 dotnet/src/AutoGen.SourceGenerator/DocumentCommentExtension.cs delete mode 100644 dotnet/src/AutoGen.SourceGenerator/FunctionCallGenerator.cs delete mode 100644 dotnet/src/AutoGen.SourceGenerator/FunctionExtension.cs delete mode 100644 dotnet/src/AutoGen.SourceGenerator/README.md delete mode 100644 dotnet/src/AutoGen.SourceGenerator/SourceGeneratorFunctionContract.cs delete mode 100644 dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.cs delete mode 100644 
dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.tt delete mode 100644 dotnet/src/AutoGen.WebAPI/AutoGen.WebAPI.csproj delete mode 100644 dotnet/src/AutoGen.WebAPI/Extension.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/Converter/OpenAIMessageConverter.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIAssistantMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletion.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionChoice.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionOption.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionUsage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIImageUrlObject.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIStreamOptions.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAISystemMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolCallObject.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserImageContent.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessageItem.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMultiModalMessage.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserTextContent.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs delete mode 100644 dotnet/src/AutoGen.WebAPI/OpenAIChatCompletionMiddleware.cs delete mode 100644 dotnet/src/AutoGen/API/LLMConfigAPI.cs delete mode 100644 dotnet/src/AutoGen/Agent/AssistantAgent.cs delete mode 100644 dotnet/src/AutoGen/Agent/ConversableAgent.cs delete mode 100644 dotnet/src/AutoGen/Agent/UserProxyAgent.cs delete mode 100644 dotnet/src/AutoGen/AutoGen.csproj delete mode 100644 dotnet/src/AutoGen/ConversableAgentConfig.cs delete mode 100644 dotnet/src/AutoGen/GlobalUsing.cs delete mode 100644 dotnet/src/AutoGen/Middleware/HumanInputMiddleware.cs delete mode 100644 dotnet/test/.editorconfig delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientAgentTest.cs delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientTest.cs delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestFunctionCalls.cs delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestUtils.cs delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/AutoGen.Anthropic.Tests.csproj delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/images/.gitattributes delete mode 100644 dotnet/test/AutoGen.Anthropic.Tests/images/square.png delete mode 100644 dotnet/test/AutoGen.AotCompatibility.Tests/AutoGen.AotCompatibility.Tests.csproj delete mode 100644 dotnet/test/AutoGen.AotCompatibility.Tests/Program.cs delete mode 100644 dotnet/test/AutoGen.AzureAIInference.Tests/AutoGen.AzureAIInference.Tests.csproj delete mode 100644 dotnet/test/AutoGen.AzureAIInference.Tests/ChatCompletionClientAgentTests.cs delete mode 100644 dotnet/test/AutoGen.AzureAIInference.Tests/ChatRequestMessageTests.cs delete mode 100644 dotnet/test/AutoGen.DotnetInteractive.Tests/AutoGen.DotnetInteractive.Tests.csproj delete mode 100644 
dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveServiceTest.cs delete mode 100644 dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveStdioKernelConnectorTests.cs delete mode 100644 dotnet/test/AutoGen.DotnetInteractive.Tests/InProcessDotnetInteractiveKernelBuilderTest.cs delete mode 100644 dotnet/test/AutoGen.DotnetInteractive.Tests/MessageExtensionTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/ApprovalTests/FunctionContractExtensionTests.ItGenerateGetWeatherToolTest.approved.txt delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/AutoGen.Gemini.Tests.csproj delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/FunctionContractExtensionTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/Functions.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/GeminiAgentTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/GeminiMessageTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/GoogleGeminiClientTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/SampleTests.cs delete mode 100644 dotnet/test/AutoGen.Gemini.Tests/VertexGeminiClientTests.cs delete mode 100644 dotnet/test/AutoGen.Mistral.Tests/AutoGen.Mistral.Tests.csproj delete mode 100644 dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs delete mode 100644 dotnet/test/AutoGen.Mistral.Tests/MistralClientTests.cs delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/AutoGen.Ollama.Tests.csproj delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/OllamaAgentTests.cs delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/OllamaMessageTests.cs delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/OllamaTextEmbeddingServiceTests.cs delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/images/image.png delete mode 100644 dotnet/test/AutoGen.Ollama.Tests/images/square.png delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/GlobalUsing.cs delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/MathClassTest.cs delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIChatAgentTest.cs delete mode 100644 dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIMessageTests.cs delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromMethod.approved.txt delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromPrompt.approved.txt delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromTestPlugin.approved.txt delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionExtensionTests.cs delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs delete mode 100644 dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionCallTemplateTests.TestFunctionCallTemplate.approved.txt delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Add_Test.approved.txt delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.DictionaryToString_Test.approved.txt delete mode 100644 
dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Query_Test.approved.txt delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Sum_Test.approved.txt delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/AutoGen.SourceGenerator.Tests.csproj delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/FilescopeNamespaceFunctionExample.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateEncodingTests.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateTests.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExamples.cs delete mode 100644 dotnet/test/AutoGen.SourceGenerator.Tests/TopLevelStatementFunctionExample.cs delete mode 100644 dotnet/test/AutoGen.Test.Share/Attribute/EnvironmentSpecificFactAttribute.cs delete mode 100644 dotnet/test/AutoGen.Test.Share/Attribute/OpenAIFact.cs delete mode 100644 dotnet/test/AutoGen.Test.Share/AutoGen.Tests.Share.csproj delete mode 100644 dotnet/test/AutoGen.Test.Share/EchoAgent.cs delete mode 100644 dotnet/test/AutoGen.Tests/ApprovalTests/square.png delete mode 100644 dotnet/test/AutoGen.Tests/AutoGen.Tests.csproj delete mode 100644 dotnet/test/AutoGen.Tests/BasicSampleTest.cs delete mode 100644 dotnet/test/AutoGen.Tests/GlobalUsing.cs delete mode 100644 dotnet/test/AutoGen.Tests/GroupChat/GraphTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/GroupChat/GroupChatTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/ImageMessageTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/MiddlewareAgentTest.cs delete mode 100644 dotnet/test/AutoGen.Tests/MiddlewareTest.cs delete mode 100644 dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/Orchestrator/RoundRobinOrchestratorTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/Orchestrator/WorkflowOrchestratorTests.cs delete mode 100644 dotnet/test/AutoGen.Tests/SingleAgentTest.cs delete mode 100644 dotnet/test/AutoGen.Tests/TwoAgentTest.cs delete mode 100644 dotnet/test/AutoGen.Tests/WorkflowTest.cs delete mode 100644 dotnet/test/AutoGen.WebAPI.Tests/AutoGen.WebAPI.Tests.csproj delete mode 100644 dotnet/test/AutoGen.WebAPI.Tests/EchoAgent.cs delete mode 100644 dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs delete mode 100644 dotnet/website/.gitignore delete mode 100644 dotnet/website/README.md delete mode 100644 dotnet/website/articles/Agent-overview.md delete mode 100644 dotnet/website/articles/AutoGen-Mistral-Overview.md delete mode 100644 dotnet/website/articles/AutoGen-OpenAI-Overview.md delete mode 100644 dotnet/website/articles/AutoGen.Gemini/Chat-with-google-gemini.md delete mode 100644 dotnet/website/articles/AutoGen.Gemini/Chat-with-vertex-gemini.md delete mode 100644 dotnet/website/articles/AutoGen.Gemini/Function-call-with-gemini.md delete mode 100644 dotnet/website/articles/AutoGen.Gemini/Image-chat-with-gemini.md delete mode 100644 dotnet/website/articles/AutoGen.Gemini/Overview.md delete mode 100644 dotnet/website/articles/AutoGen.Ollama/Chat-with-llama.md delete mode 100644 dotnet/website/articles/AutoGen.Ollama/Chat-with-llava.md delete mode 100644 dotnet/website/articles/AutoGen.SemanticKernel/AutoGen-SemanticKernel-Overview.md delete mode 100644 dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md delete mode 100644 
dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-support-more-messages.md delete mode 100644 dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelChatAgent-simple-chat.md delete mode 100644 dotnet/website/articles/AutoGen.SemanticKernel/Use-kernel-plugin-in-other-agents.md delete mode 100644 dotnet/website/articles/Built-in-messages.md delete mode 100644 dotnet/website/articles/Consume-LLM-server-from-LM-Studio.md delete mode 100644 dotnet/website/articles/Create-a-user-proxy-agent.md delete mode 100644 dotnet/website/articles/Create-an-agent.md delete mode 100644 dotnet/website/articles/Create-type-safe-function-call.md delete mode 100644 dotnet/website/articles/Create-your-own-agent.md delete mode 100644 dotnet/website/articles/Create-your-own-middleware.md delete mode 100644 dotnet/website/articles/Function-call-middleware.md delete mode 100644 dotnet/website/articles/Function-call-overview.md delete mode 100644 dotnet/website/articles/Function-call-with-ollama-and-litellm.md delete mode 100644 dotnet/website/articles/Group-chat-overview.md delete mode 100644 dotnet/website/articles/Group-chat.md delete mode 100644 dotnet/website/articles/Installation.md delete mode 100644 dotnet/website/articles/Middleware-overview.md delete mode 100644 dotnet/website/articles/MistralChatAgent-count-token-usage.md delete mode 100644 dotnet/website/articles/MistralChatAgent-use-function-call.md delete mode 100644 dotnet/website/articles/OpenAIChatAgent-connect-to-third-party-api.md delete mode 100644 dotnet/website/articles/OpenAIChatAgent-simple-chat.md delete mode 100644 dotnet/website/articles/OpenAIChatAgent-support-more-messages.md delete mode 100644 dotnet/website/articles/OpenAIChatAgent-use-function-call.md delete mode 100644 dotnet/website/articles/OpenAIChatAgent-use-json-mode.md delete mode 100644 dotnet/website/articles/Print-message-middleware.md delete mode 100644 dotnet/website/articles/Roundrobin-chat.md delete mode 100644 dotnet/website/articles/Run-dotnet-code.md delete mode 100644 dotnet/website/articles/Two-agent-chat.md delete mode 100644 dotnet/website/articles/Use-function-call.md delete mode 100644 dotnet/website/articles/Use-graph-in-group-chat.md delete mode 100644 dotnet/website/articles/function-comparison-page-between-python-AutoGen-and-autogen.net.md delete mode 100644 dotnet/website/articles/getting-start.md delete mode 100644 dotnet/website/articles/toc.yml delete mode 100644 dotnet/website/docfx.json delete mode 100644 dotnet/website/filterConfig.yml delete mode 100644 dotnet/website/images/ag.ico delete mode 100644 dotnet/website/images/ag.svg delete mode 100644 dotnet/website/images/articles/ConnectTo3PartyOpenAI/output.gif delete mode 100644 dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-with-auto-invoke.png delete mode 100644 dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-without-auto-invoke.png delete mode 100644 dotnet/website/images/articles/CreateUserProxyAgent/image-1.png delete mode 100644 dotnet/website/images/articles/DynamicGroupChat/dynamicChat.gif delete mode 100644 dotnet/website/images/articles/PrintMessageMiddleware/printMessage.png delete mode 100644 dotnet/website/images/articles/PrintMessageMiddleware/streamingoutput.gif delete mode 100644 dotnet/website/images/articles/SequentialGroupChat/SearcherSummarizer.gif delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsA.png delete mode 100644 
dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsB.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsC.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.2OpenAIModel.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.3ModelNameAndURL.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6b.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Terminal.png delete mode 100644 dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/TheModelTab.png delete mode 100644 dotnet/website/index.md delete mode 100644 dotnet/website/release_note/0.0.16.md delete mode 100644 dotnet/website/release_note/0.0.17.md delete mode 100644 dotnet/website/release_note/0.1.0.md delete mode 100644 dotnet/website/release_note/toc.yml delete mode 100644 dotnet/website/release_note/update.md delete mode 100644 dotnet/website/template/public/main.js delete mode 100644 dotnet/website/toc.yml delete mode 100644 dotnet/website/tutorial/Chat-with-an-agent.md delete mode 100644 dotnet/website/tutorial/Create-agent-with-tools.md delete mode 100644 dotnet/website/tutorial/Image-chat-with-agent.md delete mode 100644 dotnet/website/tutorial/Use-AutoGen.Net-agent-as-model-in-AG-Studio.md delete mode 100644 dotnet/website/tutorial/toc.yml delete mode 100644 website/docs/Examples.mdx diff --git a/.github/workflows/dotnet-build.yml b/.github/workflows/dotnet-build.yml deleted file mode 100644 index 0b11bac402..0000000000 --- a/.github/workflows/dotnet-build.yml +++ /dev/null @@ -1,233 +0,0 @@ -# This workflow will build a .NET project -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net - -name: dotnet-ci - -on: - workflow_dispatch: - pull_request: - branches: [ "main" ] - push: - branches: [ "main" ] - merge_group: - types: [checks_requested] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref }} - cancel-in-progress: ${{ github.ref != 'refs/heads/main' || github.ref != 'refs/heads/dotnet' }} - -permissions: - contents: read - packages: write - -jobs: - paths-filter: - runs-on: ubuntu-latest - outputs: - hasChanges: ${{ steps.filter.outputs.dotnet == 'true'}} - steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - dotnet: - - "dotnet/**" - workflows: - - ".github/workflows/**" - - name: dotnet has changes - run: echo "dotnet has changes" - if: steps.filter.outputs.dotnet == 'true' - - name: workflows has changes - run: echo "workflows has changes" - if: steps.filter.outputs.workflows == 'true' - build: - name: Dotnet Build - needs: paths-filter - if: needs.paths-filter.outputs.hasChanges == 'true' - defaults: - run: - working-directory: dotnet - strategy: - fail-fast: false - matrix: - os: [ ubuntu-latest, macos-latest ] - python-version: ["3.11"] - runs-on: ${{ matrix.os }} - timeout-minutes: 30 - steps: - - uses: actions/checkout@v4 - with: - lfs: true - - uses: astral-sh/setup-uv@v5 - with: - version: "latest" - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install jupyter and ipykernel - run: | - uv pip install --system jupyter ipykernel - - name: list available kernels - run: | - python -m jupyter 
kernelspec list - - name: Setup .NET - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '8.0.x' - - name: Restore dependencies - run: | - # dotnet nuget add source --name dotnet-tool https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json --configfile NuGet.config - dotnet restore -bl - - name: Format check - run: | - echo "Format check" - echo "If you see any error in this step, please run 'dotnet format' locally to format the code." - dotnet format --verify-no-changes -v diag --no-restore - - name: Build - run: | - echo "Build AutoGen" - dotnet build --no-restore --configuration Release -bl /p:SignAssembly=true - - name: Unit Test - run: dotnet test --no-build -bl --configuration Release - aot-test: # this make sure the AutoGen.Core is aot compatible - strategy: - fail-fast: false # ensures the entire test matrix is run, even if one permutation fails - matrix: - os: [ ubuntu-latest ] - version: [ net8.0 ] - needs: build - defaults: - run: - working-directory: dotnet - - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # fetching all - - - name: Setup dotnet - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '8.0.x' - - - name: publish AOT testApp, assert static analysis warning count, and run the app - shell: pwsh - run: ./.tools/test-aot-compatibility.ps1 ${{ matrix.version }} - openai-test: - name: Run openai test - runs-on: ubuntu-latest - environment: dotnet - defaults: - run: - working-directory: dotnet - if: success() && (github.ref == 'refs/heads/main') - needs: aot-test - steps: - - uses: actions/checkout@v4 - with: - lfs: true - - uses: astral-sh/setup-uv@v5 - with: - version: "latest" - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - name: Install jupyter and ipykernel - run: | - uv pip install --system jupyter ipykernel - - name: list available kernels - run: | - python -m jupyter kernelspec list - - name: Setup .NET - uses: actions/setup-dotnet@v4 - with: - global-json-file: dotnet/global.json - - name: Restore dependencies - run: | - dotnet restore -bl - - name: Build - run: | - echo "Build AutoGen" - dotnet build --no-restore --configuration Release -bl /p:SignAssembly=true - - name: OpenAI Test - run: dotnet test --no-build -bl --configuration Release - env: - AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} - AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} - AZURE_GPT_35_MODEL_ID: ${{ secrets.AZURE_GPT_35_MODEL_ID }} - OEPNAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - - name: Pack - run: | - echo "Create nightly build package" - dotnet pack --no-build --configuration Release --output './output/nightly' -p:VersionSuffix=nightly-${{github.run_id}} -bl - - echo "Create release build package" - dotnet pack --no-build --configuration Release --output './output/release' -bl - - echo "ls output directory" - ls -R ./output - - name: Upload package - uses: actions/upload-artifact@v4 - with: - name: nightly - path: ./dotnet/output/nightly - - name: Upload package - uses: actions/upload-artifact@v4 - with: - name: release - path: ./dotnet/output/release - publish: - environment: dotnet-internal-feed - name: Publish to nightly feeds - runs-on: ubuntu-latest - defaults: - run: - working-directory: dotnet - needs: openai-test - steps: - - name: Setup .NET - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '6.0.x' - source-url: https://devdiv.pkgs.visualstudio.com/DevDiv/_packaging/AutoGen/nuget/v3/index.json - env: - 
NUGET_AUTH_TOKEN: ${{ secrets.AZURE_DEVOPS_TOKEN }} - - uses: actions/download-artifact@v4 - with: - name: nightly - path: ./dotnet/output/nightly - - uses: actions/download-artifact@v4 - with: - name: release - path: ./dotnet/output/release - - name: Publish nightly package to Azure Devops - run: | - echo "Publish nightly package to Azure Devops" - echo "ls output directory" - ls -R ./output/nightly - dotnet nuget push --api-key AzureArtifacts ./output/nightly/*.nupkg --skip-duplicate - env: - AZURE_ARTIFACTS_FEED_URL: https://devdiv.pkgs.visualstudio.com/DevDiv/_packaging/AutoGen/nuget/v3/index.json - NUGET_AUTH_TOKEN: ${{ secrets.AZURE_DEVOPS_TOKEN }} - continue-on-error: true - - name: Publish nightly package to github package - run: | - echo "Publish nightly package to github package" - echo "ls output directory" - ls -R ./output/nightly - dotnet nuget push --api-key ${{ secrets.GITHUB_TOKEN }} --source "https://nuget.pkg.github.com/microsoft/index.json" ./output/nightly/*.nupkg --skip-duplicate - continue-on-error: true - - name: Publish nightly package to agentchat myget feed - run: | - echo "Publish nightly package to agentchat myget feed" - echo "ls output directory" - ls -R ./output/nightly - dotnet nuget push --api-key ${{ secrets.MYGET_TOKEN }} --source "https://www.myget.org/F/agentchat/api/v3/index.json" ./output/nightly/*.nupkg --skip-duplicate - env: - MYGET_TOKEN: ${{ secrets.MYGET_TOKEN }} - continue-on-error: true - diff --git a/.github/workflows/dotnet-release.yml b/.github/workflows/dotnet-release.yml deleted file mode 100644 index 7166e3e17d..0000000000 --- a/.github/workflows/dotnet-release.yml +++ /dev/null @@ -1,77 +0,0 @@ -# This workflow will build a .NET project -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net - -name: dotnet-release - -on: - workflow_dispatch: - push: - branches: - - release/dotnet/** - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref }} - cancel-in-progress: true - -permissions: - contents: read - packages: write - -jobs: - build: - name: Build and release - runs-on: ubuntu-latest - environment: dotnet - defaults: - run: - working-directory: dotnet - steps: - - uses: actions/checkout@v4 - with: - lfs: true - - uses: astral-sh/setup-uv@v5 - with: - version: "latest" - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - name: Install jupyter and ipykernel - run: | - uv pip install --system jupyter ipykernel - - name: list available kernels - run: | - python -m jupyter kernelspec list - - name: Setup .NET - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '8.0.x' - - name: Restore dependencies - run: | - dotnet restore -bl - - name: Build - run: | - echo "Build AutoGen" - dotnet build --no-restore --configuration Release -bl /p:SignAssembly=true - - name: Unit Test - run: dotnet test --no-build -bl --configuration Release - env: - AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} - AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} - AZURE_GPT_35_MODEL_ID: ${{ secrets.AZURE_GPT_35_MODEL_ID }} - OEPNAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - - name: Pack - run: | - echo "Create release build package" - dotnet pack --no-build --configuration Release --output './output/release' -bl - - echo "ls output directory" - ls -R ./output - - name: Publish package to Nuget - run: | - echo "Publish package to Nuget" - echo "ls output directory" - ls -R ./output/release - # remove 
AutoGen.SourceGenerator.snupkg because it's an empty package - rm ./output/release/AutoGen.SourceGenerator.*.snupkg - dotnet nuget push --api-key ${{ secrets.AUTOGEN_NUGET_API_KEY }} --source https://api.nuget.org/v3/index.json ./output/release/*.nupkg --skip-duplicate diff --git a/README.md b/README.md index 9ce6729241..e2447a78d8 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![Build](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml/badge.svg)](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml) ![Python Version](https://img.shields.io/badge/3.9%20%7C%203.10%20%7C%203.11%20%7C%203.12-blue) [![Discord](https://img.shields.io/discord/1153072414184452236?logo=discord&style=flat)](https://discord.gg/pAbnFJrkgZ) -[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/cloudposse.svg?style=social&label=Follow%20%40ag2ai)](https://x.com/ag2ai) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/cloudposse.svg?style=social&label=Follow%20%40ag2ai)](https://x.com/Chi_Wang_) diff --git a/dotnet/.config/dotnet-tools.json b/dotnet/.config/dotnet-tools.json deleted file mode 100644 index 6b2517ea2c..0000000000 --- a/dotnet/.config/dotnet-tools.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1, - "isRoot": true, - "tools": { - "dotnet-repl": { - "version": "0.1.205", - "commands": [ - "dotnet-repl" - ] - }, - "docfx": { - "version": "2.67.5", - "commands": [ - "docfx" - ] - } - } -} \ No newline at end of file diff --git a/dotnet/.editorconfig b/dotnet/.editorconfig deleted file mode 100644 index 1bfabf9edd..0000000000 --- a/dotnet/.editorconfig +++ /dev/null @@ -1,179 +0,0 @@ -ο»Ώ# EditorConfig is awesome:http://EditorConfig.org - -# top-most EditorConfig file -root = true - -# Don't use tabs for indentation. -[*] -indent_style = space -# (Please don't specify an indent_size here; that has too many unintended consequences.) 
- -# Code files -[*.{cs,csx,vb,vbx}] -indent_size = 4 -insert_final_newline = true -charset = utf-8-bom - -[*.xaml] -indent_size = 4 - -[*.ps1] -indent_size = 2 - -# Xml project files -[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}] -indent_size = 2 - -# Xml config files -[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}] -indent_size = 2 - -# JSON files -[*.json] -indent_size = 2 - -[*.groovy] -indent_size = 2 - -# Dotnet code style settings: -[*.{cs,vb}] -# Sort using and Import directives with System.* appearing first -dotnet_sort_system_directives_first = true -dotnet_style_require_accessibility_modifiers = always:warning - -# No blank line between System.* and Microsoft.* -dotnet_separate_import_directive_groups = false - -# Suggest more modern language features when available -dotnet_style_object_initializer = true:suggestion -dotnet_style_collection_initializer = true:suggestion -dotnet_style_coalesce_expression = true:error -dotnet_style_null_propagation = true:error -dotnet_style_explicit_tuple_names = true:suggestion -dotnet_style_prefer_inferred_tuple_names = true:suggestion -dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion -dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion -dotnet_style_prefer_conditional_expression_over_return = false -dotnet_style_prefer_conditional_expression_over_assignment = false -dotnet_style_prefer_auto_properties = false - -# Use language keywords instead of framework type names for type references -dotnet_style_predefined_type_for_locals_parameters_members = true:error -dotnet_style_predefined_type_for_member_access = true:error - -# Prefer read-only on fields -dotnet_style_readonly_field = false - -# CSharp code style settings: -[*.cs] - -# Prefer "var" only when the type is apparent -csharp_style_var_for_built_in_types = false:suggestion -csharp_style_var_when_type_is_apparent = true:suggestion -csharp_style_var_elsewhere = false:suggestion - -# Prefer method-like constructs to have a block body -csharp_style_expression_bodied_methods = false:none -csharp_style_expression_bodied_constructors = false:none -csharp_style_expression_bodied_operators = false:none - -# Prefer property-like constructs to have an expression-body -csharp_style_expression_bodied_properties = true:none -csharp_style_expression_bodied_indexers = true:none -csharp_style_expression_bodied_accessors = true:none - -# Use block body for local functions -csharp_style_expression_bodied_local_functions = when_on_single_line:silent - -# Suggest more modern language features when available -csharp_style_pattern_matching_over_is_with_cast_check = true:error -csharp_style_pattern_matching_over_as_with_null_check = true:error -csharp_style_inlined_variable_declaration = true:error -csharp_style_throw_expression = true:suggestion -csharp_style_conditional_delegate_call = true:suggestion -csharp_style_deconstructed_variable_declaration = true:suggestion - -# Newline settings -csharp_new_line_before_open_brace = all -csharp_new_line_before_else = true -csharp_new_line_before_catch = true -csharp_new_line_before_finally = true -csharp_new_line_before_members_in_object_initializers = true -csharp_new_line_before_members_in_anonymous_types = true -csharp_new_line_between_query_expression_clauses = true - -# Identation options -csharp_indent_case_contents = true -csharp_indent_case_contents_when_block = true -csharp_indent_switch_labels = true -csharp_indent_labels = no_change -csharp_indent_block_contents = 
true -csharp_indent_braces = false - -# Spacing options -csharp_space_after_cast = false -csharp_space_after_keywords_in_control_flow_statements = true -csharp_space_between_method_call_empty_parameter_list_parentheses = false -csharp_space_between_method_call_parameter_list_parentheses = false -csharp_space_between_method_call_name_and_opening_parenthesis = false -csharp_space_between_method_declaration_parameter_list_parentheses = false -csharp_space_between_method_declaration_empty_parameter_list_parentheses = false -csharp_space_between_method_declaration_parameter_list_parentheses = false -csharp_space_between_method_declaration_name_and_open_parenthesis = false -csharp_space_between_parentheses = false -csharp_space_between_square_brackets = false -csharp_space_between_empty_square_brackets = false -csharp_space_before_open_square_brackets = false -csharp_space_around_declaration_statements = false -csharp_space_around_binary_operators = before_and_after -csharp_space_after_cast = false -csharp_space_before_semicolon_in_for_statement = false -csharp_space_before_dot = false -csharp_space_after_dot = false -csharp_space_before_comma = false -csharp_space_after_comma = true -csharp_space_before_colon_in_inheritance_clause = true -csharp_space_after_colon_in_inheritance_clause = true -csharp_space_after_semicolon_in_for_statement = true - -# Wrapping -csharp_preserve_single_line_statements = true -csharp_preserve_single_line_blocks = true - -# Code block -csharp_prefer_braces = true:warning - -# Using statements -csharp_using_directive_placement = outside_namespace:error - -# Modifier settings -csharp_prefer_static_local_function = true:warning -csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning - -# enable format error -dotnet_diagnostic.IDE0055.severity = error - -# IDE0035: Remove unreachable code -dotnet_diagnostic.IDE0035.severity = error - -# IDE0005: Remove unncecessary usings -dotnet_diagnostic.CS8019.severity = error -dotnet_diagnostic.IDE0005.severity = error - -# IDE0069: Remove unused local variable -dotnet_diagnostic.IDE0069.severity = error - -# disable CS1573: Parameter has no matching param tag in the XML comment for -dotnet_diagnostic.CS1573.severity = none - -# disable CS1570: XML comment has badly formed XML -dotnet_diagnostic.CS1570.severity = none - -dotnet_diagnostic.IDE0035.severity = warning # Remove unreachable code -dotnet_diagnostic.IDE0161.severity = warning # Use file-scoped namespace - -csharp_style_var_elsewhere = true:suggestion # Prefer 'var' everywhere - -# disable check for generated code -[*.generated.cs] -generated_code = true \ No newline at end of file diff --git a/dotnet/.gitignore b/dotnet/.gitignore deleted file mode 100644 index 65e7ba678d..0000000000 --- a/dotnet/.gitignore +++ /dev/null @@ -1,30 +0,0 @@ -# gitignore file for C#/VS - -# Build results -[Dd]ebug/ -[Dd]ebugPublic/ -[Rr]elease/ -[Rr]eleases/ -x64/ -x86/ -build/ -bld/ -[Bb]in/ -[Oo]bj/ - -# vs cache -.vs/ - -# vs code cache -.vscode/ - -# Properties -Properties/ - -artifacts/ -output/ - -*.binlog - -# JetBrains Rider -.idea/ \ No newline at end of file diff --git a/dotnet/.tools/run_all_notebook.ps1 b/dotnet/.tools/run_all_notebook.ps1 deleted file mode 100644 index d1001064d5..0000000000 --- a/dotnet/.tools/run_all_notebook.ps1 +++ /dev/null @@ -1,64 +0,0 @@ -# cd to the directory of this script -$scriptPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$rootPath = 
Split-Path -Parent $scriptPath -$outputFolder = "$rootPath/output" -if (Test-Path $outputFolder) { - Remove-Item $outputFolder -Recurse -Force -} -New-Item -ItemType Directory -Path $outputFolder - -Set-Location $rootPath - -# list all notebooks under notebook folder -$notebooks = Get-ChildItem -Path "$rootPath/notebook" -Recurse -Include *.ipynb | ForEach-Object { $_.FullName } -# skip those notebooks with the same name as the following -$skip_notebooks = @( - 'TwoAgentChat_UserProxy.ipynb' # require user input -) - -# for each notebook, run it using dotnet perl. Check the exit code and print out the result -# if the exit code is not 0, exit the script with exit code 1 -$failNotebooks = @() -$exitCode = 0 -$LASTEXITCODE = 0 -foreach ($notebook in $notebooks) { - Write-Host "Running $notebook" - # get notebook name with extension - $name = Split-Path -Leaf $notebook - - if ($skip_notebooks -contains $name) { - Write-Host "Skipping $name" - continue - } - Write-Host "Name: $name" - $notebookFolder = Split-Path -Parent $notebook - $outputPath = "$outputFolder\$notebookFolder" - Set-Location $notebookFolder - $proc = Start-Process -FilePath dotnet -ArgumentList "repl --run $name --exit-after-run" -PassThru -NoNewWindow - $timeout = $null - $proc | Wait-Process -Timeout 180 -ErrorAction SilentlyContinue -ErrorVariable $timeout - if ($timeout) { - Write-Host "Timeout when running $notebook" - $LASTEXITCODE = 1 - } - else { - $LASTEXITCODE = $proc.ExitCode - } - Write-Host "Exit code: $LASTEXITCODE" - if ($LASTEXITCODE -ne 0) { - Write-Host "Failed to run $notebook" - $failNotebooks += $notebook - $exitCode = 1 - } - else{ - Write-Host "Successfully ran $notebook" - } - Set-Location $rootPath -} - -Write-Host "Failed notebooks:" -foreach ($notebook in $failNotebooks) { - Write-Host $notebook -} - -$failNotebooks | Should -BeNullOrEmpty \ No newline at end of file diff --git a/dotnet/.tools/test-aot-compatibility.ps1 b/dotnet/.tools/test-aot-compatibility.ps1 deleted file mode 100644 index 071edcd956..0000000000 --- a/dotnet/.tools/test-aot-compatibility.ps1 +++ /dev/null @@ -1,41 +0,0 @@ -param([string]$targetNetFramework) - -$rootDirectory = Split-Path $PSScriptRoot -Parent -$publishOutput = dotnet publish $rootDirectory/test/AutoGen.AotCompatibility.Tests -nodeReuse:false /p:UseSharedCompilation=false /p:ExposeExperimentalFeatures=true - -$actualWarningCount = 0 - -foreach ($line in $($publishOutput -split "`r`n")) -{ - if ($line -like "*analysis warning IL*") - { - Write-Host $line - - $actualWarningCount += 1 - } -} - -pushd $rootDirectory/test/AutoGen.AotCompatibility.Tests/bin/Release/$targetNetFramework/linux-x64 - -Write-Host "Executing test App..." -./AutoGen.AotCompatibility.Tests -Write-Host "Finished executing test App" - -if ($LastExitCode -ne 0) -{ - Write-Host "There was an error while executing AotCompatibility Test App. LastExitCode is:", $LastExitCode -} - -popd - -Write-Host "Actual warning count is:", $actualWarningCount -$expectedWarningCount = 0 - -$testPassed = 0 -if ($actualWarningCount -ne $expectedWarningCount) -{ - $testPassed = 1 - Write-Host "Actual warning count:", actualWarningCount, "is not as expected. 
Expected warning count is:", $expectedWarningCount -} - -Exit $testPassed \ No newline at end of file diff --git a/dotnet/AutoGen.sln b/dotnet/AutoGen.sln deleted file mode 100644 index db0b2cbb54..0000000000 --- a/dotnet/AutoGen.sln +++ /dev/null @@ -1,257 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.8.34322.80 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen", "src\AutoGen\AutoGen.csproj", "{B2B27ACB-AA50-4FED-A06C-3AD6B4218188}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{18BF8DD7-0585-48BF-8F97-AD333080CE06}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{F823671B-3ECA-4AE6-86DA-25E920D3FE64}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Tests", "test\AutoGen.Tests\AutoGen.Tests.csproj", "{FDD99AEC-4C57-4020-B23F-650612856102}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SourceGenerator", "src\AutoGen.SourceGenerator\AutoGen.SourceGenerator.csproj", "{3FFD14E3-D6BC-4EA7-97A2-D21733060FD6}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SourceGenerator.Tests", "test\AutoGen.SourceGenerator.Tests\AutoGen.SourceGenerator.Tests.csproj", "{05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.BasicSample", "sample\AutoGen.BasicSamples\AutoGen.BasicSample.csproj", "{7EBF916A-A7B1-4B74-AF10-D705B7A18F58}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "sample", "sample", "{FBFEAD1F-29EB-4D99-A672-0CD8473E10B9}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.DotnetInteractive", "src\AutoGen.DotnetInteractive\AutoGen.DotnetInteractive.csproj", "{B61D8008-7FB7-4C0E-8044-3A74AA63A596}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.LMStudio", "src\AutoGen.LMStudio\AutoGen.LMStudio.csproj", "{F98BDA9B-8657-4BA8-9B03-BAEA454CAE60}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SemanticKernel", "src\AutoGen.SemanticKernel\AutoGen.SemanticKernel.csproj", "{45D6FC80-36F3-4967-9663-E20B63824621}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Core", "src\AutoGen.Core\AutoGen.Core.csproj", "{D58D43D1-0617-4A3D-9932-C773E6398535}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.V1", "src\AutoGen.OpenAI.V1\AutoGen.OpenAI.V1.csproj", "{63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Mistral", "src\AutoGen.Mistral\AutoGen.Mistral.csproj", "{6585D1A4-3D97-4D76-A688-1933B61AEB19}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Mistral.Tests", "test\AutoGen.Mistral.Tests\AutoGen.Mistral.Tests.csproj", "{15441693-3659-4868-B6C1-B106F52FF3BA}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.WebAPI", "src\AutoGen.WebAPI\AutoGen.WebAPI.csproj", "{257FFD71-08E5-40C7-AB04-6A81A78EB410}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.WebAPI.Tests", "test\AutoGen.WebAPI.Tests\AutoGen.WebAPI.Tests.csproj", "{E2EF5E66-683C-4DDC-8ADA-5F676502B9BA}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SemanticKernel.Tests", "test\AutoGen.SemanticKernel.Tests\AutoGen.SemanticKernel.Tests.csproj", "{1DFABC4A-8458-4875-8DCB-59F3802DAC65}" -EndProject 
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.V1.Tests", "test\AutoGen.OpenAI.V1.Tests\AutoGen.OpenAI.V1.Tests.csproj", "{D36A85F9-C172-487D-8192-6BFE5D05B4A7}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.DotnetInteractive.Tests", "test\AutoGen.DotnetInteractive.Tests\AutoGen.DotnetInteractive.Tests.csproj", "{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama", "src\AutoGen.Ollama\AutoGen.Ollama.csproj", "{9F9E6DED-3D92-4970-909A-70FC11F1A665}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama.Tests", "test\AutoGen.Ollama.Tests\AutoGen.Ollama.Tests.csproj", "{03E31CAA-3728-48D3-B936-9F11CF6C18FE}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama.Sample", "sample\AutoGen.Ollama.Sample\AutoGen.Ollama.Sample.csproj", "{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SemanticKernel.Sample", "sample\AutoGen.SemanticKernel.Sample\AutoGen.SemanticKernel.Sample.csproj", "{52958A60-3FF7-4243-9058-34A6E4F55C31}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Anthropic", "src\AutoGen.Anthropic\AutoGen.Anthropic.csproj", "{6A95E113-B824-4524-8F13-CD0C3E1C8804}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Anthropic.Tests", "test\AutoGen.Anthropic.Tests\AutoGen.Anthropic.Tests.csproj", "{815E937E-86D6-4476-9EC6-B7FBCBBB5DB6}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Anthropic.Samples", "sample\AutoGen.Anthropic.Samples\AutoGen.Anthropic.Samples.csproj", "{834B4E85-64E5-4382-8465-548F332E5298}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini", "src\AutoGen.Gemini\AutoGen.Gemini.csproj", "{EFE0DC86-80FC-4D52-95B7-07654BA1A769}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini.Tests", "test\AutoGen.Gemini.Tests\AutoGen.Gemini.Tests.csproj", "{8EA16BAB-465A-4C07-ABC4-1070D40067E9}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini.Sample", "sample\AutoGen.Gemini.Sample\AutoGen.Gemini.Sample.csproj", "{19679B75-CE3A-4DF0-A3F0-CA369D2760A4}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AotCompatibility.Tests", "test\AutoGen.AotCompatibility.Tests\AutoGen.AotCompatibility.Tests.csproj", "{6B82F26D-5040-4453-B21B-C8D1F913CE4C}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.V1.Sample", "sample\AutoGen.OpenAI.Sample\AutoGen.OpenAI.V1.Sample.csproj", "{0E635268-351C-4A6B-A28D-593D868C2CA4}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.WebAPI.Sample", "sample\AutoGen.WebAPI.Sample\AutoGen.WebAPI.Sample.csproj", "{12079C18-A519-403F-BBFD-200A36A0C083}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AzureAIInference", "src\AutoGen.AzureAIInference\AutoGen.AzureAIInference.csproj", "{5C45981D-1319-4C25-935C-83D411CB28DF}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AzureAIInference.Tests", "test\AutoGen.AzureAIInference.Tests\AutoGen.AzureAIInference.Tests.csproj", "{5970868F-831E-418F-89A9-4EC599563E16}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Tests.Share", "test\AutoGen.Test.Share\AutoGen.Tests.Share.csproj", "{143725E2-206C-4D37-93E4-9EDF699826B2}" -EndProject -Global - 
GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {B2B27ACB-AA50-4FED-A06C-3AD6B4218188}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B2B27ACB-AA50-4FED-A06C-3AD6B4218188}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B2B27ACB-AA50-4FED-A06C-3AD6B4218188}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B2B27ACB-AA50-4FED-A06C-3AD6B4218188}.Release|Any CPU.Build.0 = Release|Any CPU - {FDD99AEC-4C57-4020-B23F-650612856102}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FDD99AEC-4C57-4020-B23F-650612856102}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FDD99AEC-4C57-4020-B23F-650612856102}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FDD99AEC-4C57-4020-B23F-650612856102}.Release|Any CPU.Build.0 = Release|Any CPU - {3FFD14E3-D6BC-4EA7-97A2-D21733060FD6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3FFD14E3-D6BC-4EA7-97A2-D21733060FD6}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3FFD14E3-D6BC-4EA7-97A2-D21733060FD6}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3FFD14E3-D6BC-4EA7-97A2-D21733060FD6}.Release|Any CPU.Build.0 = Release|Any CPU - {05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5}.Release|Any CPU.Build.0 = Release|Any CPU - {7EBF916A-A7B1-4B74-AF10-D705B7A18F58}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7EBF916A-A7B1-4B74-AF10-D705B7A18F58}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7EBF916A-A7B1-4B74-AF10-D705B7A18F58}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7EBF916A-A7B1-4B74-AF10-D705B7A18F58}.Release|Any CPU.Build.0 = Release|Any CPU - {B61D8008-7FB7-4C0E-8044-3A74AA63A596}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B61D8008-7FB7-4C0E-8044-3A74AA63A596}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B61D8008-7FB7-4C0E-8044-3A74AA63A596}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B61D8008-7FB7-4C0E-8044-3A74AA63A596}.Release|Any CPU.Build.0 = Release|Any CPU - {F98BDA9B-8657-4BA8-9B03-BAEA454CAE60}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F98BDA9B-8657-4BA8-9B03-BAEA454CAE60}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F98BDA9B-8657-4BA8-9B03-BAEA454CAE60}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F98BDA9B-8657-4BA8-9B03-BAEA454CAE60}.Release|Any CPU.Build.0 = Release|Any CPU - {45D6FC80-36F3-4967-9663-E20B63824621}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {45D6FC80-36F3-4967-9663-E20B63824621}.Debug|Any CPU.Build.0 = Debug|Any CPU - {45D6FC80-36F3-4967-9663-E20B63824621}.Release|Any CPU.ActiveCfg = Release|Any CPU - {45D6FC80-36F3-4967-9663-E20B63824621}.Release|Any CPU.Build.0 = Release|Any CPU - {D58D43D1-0617-4A3D-9932-C773E6398535}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D58D43D1-0617-4A3D-9932-C773E6398535}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D58D43D1-0617-4A3D-9932-C773E6398535}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D58D43D1-0617-4A3D-9932-C773E6398535}.Release|Any CPU.Build.0 = Release|Any CPU - {63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC}.Release|Any CPU.Build.0 = Release|Any CPU - {6585D1A4-3D97-4D76-A688-1933B61AEB19}.Debug|Any CPU.ActiveCfg 
= Debug|Any CPU - {6585D1A4-3D97-4D76-A688-1933B61AEB19}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6585D1A4-3D97-4D76-A688-1933B61AEB19}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6585D1A4-3D97-4D76-A688-1933B61AEB19}.Release|Any CPU.Build.0 = Release|Any CPU - {15441693-3659-4868-B6C1-B106F52FF3BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {15441693-3659-4868-B6C1-B106F52FF3BA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.Build.0 = Release|Any CPU - {257FFD71-08E5-40C7-AB04-6A81A78EB410}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {257FFD71-08E5-40C7-AB04-6A81A78EB410}.Debug|Any CPU.Build.0 = Debug|Any CPU - {257FFD71-08E5-40C7-AB04-6A81A78EB410}.Release|Any CPU.ActiveCfg = Release|Any CPU - {257FFD71-08E5-40C7-AB04-6A81A78EB410}.Release|Any CPU.Build.0 = Release|Any CPU - {E2EF5E66-683C-4DDC-8ADA-5F676502B9BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E2EF5E66-683C-4DDC-8ADA-5F676502B9BA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E2EF5E66-683C-4DDC-8ADA-5F676502B9BA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E2EF5E66-683C-4DDC-8ADA-5F676502B9BA}.Release|Any CPU.Build.0 = Release|Any CPU - {1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Release|Any CPU.Build.0 = Release|Any CPU - {D36A85F9-C172-487D-8192-6BFE5D05B4A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D36A85F9-C172-487D-8192-6BFE5D05B4A7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D36A85F9-C172-487D-8192-6BFE5D05B4A7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D36A85F9-C172-487D-8192-6BFE5D05B4A7}.Release|Any CPU.Build.0 = Release|Any CPU - {B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.Build.0 = Release|Any CPU - {9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.Build.0 = Debug|Any CPU - {9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.ActiveCfg = Release|Any CPU - {9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.Build.0 = Release|Any CPU - {03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.Build.0 = Debug|Any CPU - {03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.ActiveCfg = Release|Any CPU - {03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.Build.0 = Release|Any CPU - {93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Debug|Any CPU.Build.0 = Debug|Any CPU - {93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Release|Any CPU.ActiveCfg = Release|Any CPU - {93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Release|Any CPU.Build.0 = Release|Any CPU - {52958A60-3FF7-4243-9058-34A6E4F55C31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {52958A60-3FF7-4243-9058-34A6E4F55C31}.Debug|Any CPU.Build.0 = Debug|Any CPU - {52958A60-3FF7-4243-9058-34A6E4F55C31}.Release|Any CPU.ActiveCfg = Release|Any CPU - {52958A60-3FF7-4243-9058-34A6E4F55C31}.Release|Any CPU.Build.0 = Release|Any CPU - 
{6A95E113-B824-4524-8F13-CD0C3E1C8804}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6A95E113-B824-4524-8F13-CD0C3E1C8804}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6A95E113-B824-4524-8F13-CD0C3E1C8804}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6A95E113-B824-4524-8F13-CD0C3E1C8804}.Release|Any CPU.Build.0 = Release|Any CPU - {815E937E-86D6-4476-9EC6-B7FBCBBB5DB6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {815E937E-86D6-4476-9EC6-B7FBCBBB5DB6}.Debug|Any CPU.Build.0 = Debug|Any CPU - {815E937E-86D6-4476-9EC6-B7FBCBBB5DB6}.Release|Any CPU.ActiveCfg = Release|Any CPU - {815E937E-86D6-4476-9EC6-B7FBCBBB5DB6}.Release|Any CPU.Build.0 = Release|Any CPU - {834B4E85-64E5-4382-8465-548F332E5298}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {834B4E85-64E5-4382-8465-548F332E5298}.Debug|Any CPU.Build.0 = Debug|Any CPU - {834B4E85-64E5-4382-8465-548F332E5298}.Release|Any CPU.ActiveCfg = Release|Any CPU - {834B4E85-64E5-4382-8465-548F332E5298}.Release|Any CPU.Build.0 = Release|Any CPU - {EFE0DC86-80FC-4D52-95B7-07654BA1A769}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EFE0DC86-80FC-4D52-95B7-07654BA1A769}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EFE0DC86-80FC-4D52-95B7-07654BA1A769}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EFE0DC86-80FC-4D52-95B7-07654BA1A769}.Release|Any CPU.Build.0 = Release|Any CPU - {8EA16BAB-465A-4C07-ABC4-1070D40067E9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8EA16BAB-465A-4C07-ABC4-1070D40067E9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8EA16BAB-465A-4C07-ABC4-1070D40067E9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8EA16BAB-465A-4C07-ABC4-1070D40067E9}.Release|Any CPU.Build.0 = Release|Any CPU - {19679B75-CE3A-4DF0-A3F0-CA369D2760A4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {19679B75-CE3A-4DF0-A3F0-CA369D2760A4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {19679B75-CE3A-4DF0-A3F0-CA369D2760A4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {19679B75-CE3A-4DF0-A3F0-CA369D2760A4}.Release|Any CPU.Build.0 = Release|Any CPU - {6B82F26D-5040-4453-B21B-C8D1F913CE4C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6B82F26D-5040-4453-B21B-C8D1F913CE4C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6B82F26D-5040-4453-B21B-C8D1F913CE4C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6B82F26D-5040-4453-B21B-C8D1F913CE4C}.Release|Any CPU.Build.0 = Release|Any CPU - {0E635268-351C-4A6B-A28D-593D868C2CA4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {0E635268-351C-4A6B-A28D-593D868C2CA4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {0E635268-351C-4A6B-A28D-593D868C2CA4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {0E635268-351C-4A6B-A28D-593D868C2CA4}.Release|Any CPU.Build.0 = Release|Any CPU - {12079C18-A519-403F-BBFD-200A36A0C083}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {12079C18-A519-403F-BBFD-200A36A0C083}.Debug|Any CPU.Build.0 = Debug|Any CPU - {12079C18-A519-403F-BBFD-200A36A0C083}.Release|Any CPU.ActiveCfg = Release|Any CPU - {12079C18-A519-403F-BBFD-200A36A0C083}.Release|Any CPU.Build.0 = Release|Any CPU - {5C45981D-1319-4C25-935C-83D411CB28DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5C45981D-1319-4C25-935C-83D411CB28DF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5C45981D-1319-4C25-935C-83D411CB28DF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5C45981D-1319-4C25-935C-83D411CB28DF}.Release|Any CPU.Build.0 = Release|Any CPU - {5970868F-831E-418F-89A9-4EC599563E16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5970868F-831E-418F-89A9-4EC599563E16}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5970868F-831E-418F-89A9-4EC599563E16}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{5970868F-831E-418F-89A9-4EC599563E16}.Release|Any CPU.Build.0 = Release|Any CPU - {143725E2-206C-4D37-93E4-9EDF699826B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {143725E2-206C-4D37-93E4-9EDF699826B2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {143725E2-206C-4D37-93E4-9EDF699826B2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {143725E2-206C-4D37-93E4-9EDF699826B2}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(NestedProjects) = preSolution - {B2B27ACB-AA50-4FED-A06C-3AD6B4218188} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {FDD99AEC-4C57-4020-B23F-650612856102} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {3FFD14E3-D6BC-4EA7-97A2-D21733060FD6} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {05A2FAD8-03B0-4B2F-82AF-2F6BF0F050E5} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {7EBF916A-A7B1-4B74-AF10-D705B7A18F58} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {B61D8008-7FB7-4C0E-8044-3A74AA63A596} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {F98BDA9B-8657-4BA8-9B03-BAEA454CAE60} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {45D6FC80-36F3-4967-9663-E20B63824621} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {D58D43D1-0617-4A3D-9932-C773E6398535} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {6585D1A4-3D97-4D76-A688-1933B61AEB19} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {15441693-3659-4868-B6C1-B106F52FF3BA} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {257FFD71-08E5-40C7-AB04-6A81A78EB410} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {E2EF5E66-683C-4DDC-8ADA-5F676502B9BA} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {1DFABC4A-8458-4875-8DCB-59F3802DAC65} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {D36A85F9-C172-487D-8192-6BFE5D05B4A7} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {B61388CA-DC73-4B7F-A7B2-7B9A86C9229E} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {9F9E6DED-3D92-4970-909A-70FC11F1A665} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {03E31CAA-3728-48D3-B936-9F11CF6C18FE} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {93AA4D0D-6EE4-44D5-AD77-7F73A3934544} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {52958A60-3FF7-4243-9058-34A6E4F55C31} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {6A95E113-B824-4524-8F13-CD0C3E1C8804} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {815E937E-86D6-4476-9EC6-B7FBCBBB5DB6} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {834B4E85-64E5-4382-8465-548F332E5298} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {EFE0DC86-80FC-4D52-95B7-07654BA1A769} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {8EA16BAB-465A-4C07-ABC4-1070D40067E9} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {19679B75-CE3A-4DF0-A3F0-CA369D2760A4} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {6B82F26D-5040-4453-B21B-C8D1F913CE4C} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {0E635268-351C-4A6B-A28D-593D868C2CA4} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {12079C18-A519-403F-BBFD-200A36A0C083} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9} - {5C45981D-1319-4C25-935C-83D411CB28DF} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} - {5970868F-831E-418F-89A9-4EC599563E16} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - {143725E2-206C-4D37-93E4-9EDF699826B2} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {93384647-528D-46C8-922C-8DB36A382F0B} - EndGlobalSection -EndGlobal diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props deleted file mode 100644 index 
b5663fe4c5..0000000000 --- a/dotnet/Directory.Build.props +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - netstandard2.0;net6.0;net8.0 - net8.0 - preview - enable - True - $(MSBuildThisFileDirectory)eng/opensource.snk - 0024000004800000940000000602000000240000525341310004000001000100f1d038d0b85ae392ad72011df91e9343b0b5df1bb8080aa21b9424362d696919e0e9ac3a8bca24e283e10f7a569c6f443e1d4e3ebc84377c87ca5caa562e80f9932bf5ea91b7862b538e13b8ba91c7565cf0e8dfeccfea9c805ae3bda044170ecc7fc6f147aeeac422dd96aeb9eb1f5a5882aa650efe2958f2f8107d2038f2ab - CS1998;CS1591 - $(NoWarn);$(CSNoWarn);NU5104 - true - true - false - true - true - false - - - - $(MSBuildThisFileDirectory) - - - - - - - - - - - - - - - Always - testData/%(RecursiveDir)%(Filename)%(Extension) - - - - - - Always - resource/%(RecursiveDir)%(Filename)%(Extension) - - - diff --git a/dotnet/NuGet.config b/dotnet/NuGet.config deleted file mode 100644 index 1d0cf4c2bc..0000000000 --- a/dotnet/NuGet.config +++ /dev/null @@ -1,8 +0,0 @@ -ο»Ώ - - - - - - - \ No newline at end of file diff --git a/dotnet/README.md b/dotnet/README.md deleted file mode 100644 index 03ebe7e9e2..0000000000 --- a/dotnet/README.md +++ /dev/null @@ -1,103 +0,0 @@ -### AutoGen for .NET - -[![dotnet-ci](https://github.com/ag2ai/ag2/actions/workflows/dotnet-build.yml/badge.svg)](https://github.com/ag2ai/ag2/actions/workflows/dotnet-build.yml) -[![NuGet version](https://badge.fury.io/nu/AutoGen.Core.svg)](https://badge.fury.io/nu/AutoGen.Core) - -> [!NOTE] -> Nightly build is available at: -> - ![Static Badge](https://img.shields.io/badge/public-blue?style=flat) ![Static Badge](https://img.shields.io/badge/nightly-yellow?style=flat) ![Static Badge](https://img.shields.io/badge/github-grey?style=flat): https://nuget.pkg.github.com/microsoft/index.json -> - ![Static Badge](https://img.shields.io/badge/public-blue?style=flat) ![Static Badge](https://img.shields.io/badge/nightly-yellow?style=flat) ![Static Badge](https://img.shields.io/badge/myget-grey?style=flat): https://www.myget.org/F/agentchat/api/v3/index.json -> - ![Static Badge](https://img.shields.io/badge/internal-blue?style=flat) ![Static Badge](https://img.shields.io/badge/nightly-yellow?style=flat) ![Static Badge](https://img.shields.io/badge/azure_devops-grey?style=flat) : https://devdiv.pkgs.visualstudio.com/DevDiv/_packaging/AutoGen/nuget/v3/index.json - - -Firstly, following the [installation guide](./website/articles/Installation.md) to install AutoGen packages. - -Then you can start with the following code snippet to create a conversable agent and chat with it. - -```csharp -using AutoGen; -using AutoGen.OpenAI; - -var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); -var gpt35Config = new OpenAIConfig(openAIKey, "gpt-3.5-turbo"); - -var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35Config], - }) - .RegisterPrintMessage(); // register a hook to print message nicely to console - -// set human input mode to ALWAYS so that user always provide input -var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: ConversableAgent.HumanInputMode.ALWAYS) - .RegisterPrintMessage(); - -// start the conversation -await userProxyAgent.InitiateChatAsync( - receiver: assistantAgent, - message: "Hey assistant, please do me a favor.", - maxRound: 10); -``` - -#### Samples -You can find more examples under the [sample project](https://github.com/ag2ai/ag2/tree/dotnet/dotnet/sample/AutoGen.BasicSamples). - -#### Functionality -- ConversableAgent - - [x] function call - - [x] code execution (dotnet only, powered by [`dotnet-interactive`](https://github.com/dotnet/interactive)) - -- Agent communication - - [x] Two-agent chat - - [x] Group chat - -- [ ] Enhanced LLM Inferences - -- Exclusive for dotnet - - [x] Source generator for type-safe function definition generation - -#### Update log -##### Update on 0.0.11 (2024-03-26) -- Add link to Discord channel in nuget's readme.md -- Document improvements -##### Update on 0.0.10 (2024-03-12) -- Rename `Workflow` to `Graph` -- Rename `AddInitializeMessage` to `SendIntroduction` -- Rename `SequentialGroupChat` to `RoundRobinGroupChat` -##### Update on 0.0.9 (2024-03-02) -- Refactor over @AutoGen.Message and introducing `TextMessage`, `ImageMessage`, `MultiModalMessage` and so on. PR [#1676](https://github.com/microsoft/autogen/pull/1676) -- Add `AutoGen.SemanticKernel` to support seamless integration with Semantic Kernel -- Move the agent contract abstraction to `AutoGen.Core` package. The `AutoGen.Core` package provides the abstraction for message type, agent and group chat and doesn't contain dependencies over `Azure.AI.OpenAI` or `Semantic Kernel`. This is useful when you want to leverage AutoGen's abstraction only and want to avoid introducing any other dependencies. 
-- Move `GPTAgent`, `OpenAIChatAgent` and all openai-dependencies to `AutoGen.OpenAI` -##### Update on 0.0.8 (2024-02-28) -- Fix [#1804](https://github.com/microsoft/autogen/pull/1804) -- Streaming support for IAgent [#1656](https://github.com/microsoft/autogen/pull/1656) -- Streaming support for middleware via `MiddlewareStreamingAgent` [#1656](https://github.com/microsoft/autogen/pull/1656) -- Graph chat support with conditional transition workflow [#1761](https://github.com/microsoft/autogen/pull/1761) -- AutoGen.SourceGenerator: Generate `FunctionContract` from `FunctionAttribute` [#1736](https://github.com/microsoft/autogen/pull/1736) -##### Update on 0.0.7 (2024-02-11) -- Add `AutoGen.LMStudio` to support comsume openai-like API from LMStudio local server -##### Update on 0.0.6 (2024-01-23) -- Add `MiddlewareAgent` -- Use `MiddlewareAgent` to implement existing agent hooks (RegisterPreProcess, RegisterPostProcess, RegisterReply) -- Remove `AutoReplyAgent`, `PreProcessAgent`, `PostProcessAgent` because they are replaced by `MiddlewareAgent` -##### Update on 0.0.5 -- Simplify `IAgent` interface by removing `ChatLLM` Property -- Add `GenerateReplyOptions` to `IAgent.GenerateReplyAsync` which allows user to specify or override the options when generating reply - -##### Update on 0.0.4 -- Move out dependency of Semantic Kernel -- Add type `IChatLLM` as connector to LLM - -##### Update on 0.0.3 -- In AutoGen.SourceGenerator, rename FunctionAttribution to FunctionAttribute -- In AutoGen, refactor over ConversationAgent, UserProxyAgent, and AssistantAgent - -##### Update on 0.0.2 -- update Azure.OpenAI.AI to 1.0.0-beta.12 -- update Semantic kernel to 1.0.1 diff --git a/dotnet/eng/MetaInfo.props b/dotnet/eng/MetaInfo.props deleted file mode 100644 index 164b54cb22..0000000000 --- a/dotnet/eng/MetaInfo.props +++ /dev/null @@ -1,12 +0,0 @@ - - - - 0.1.0 - AutoGen - https://ag2ai.github.io/autogen-for-net/ - https://github.com/microsoft/autogen - git - MIT - false - - diff --git a/dotnet/eng/Sign.props b/dotnet/eng/Sign.props deleted file mode 100644 index 0d69e7797e..0000000000 --- a/dotnet/eng/Sign.props +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - all - runtime; build; native; contentfiles; analyzers - - - - Microsoft400 - - - - - NuGet - - - diff --git a/dotnet/eng/Version.props b/dotnet/eng/Version.props deleted file mode 100644 index d90e8bc76c..0000000000 --- a/dotnet/eng/Version.props +++ /dev/null @@ -1,21 +0,0 @@ - - - - 1.0.0-beta.17 - 1.15.1 - 1.15.1-alpha - 5.0.0 - 4.3.0 - 6.0.0 - 6.8.0 - 2.4.2 - 17.7.0 - 1.0.0-beta.24229.4 - 8.0.0 - 8.0.4 - 3.0.0 - 4.3.0.2 - 1.0.0-beta.1 - 7.4.4 - - \ No newline at end of file diff --git a/dotnet/eng/opensource.snk b/dotnet/eng/opensource.snk deleted file mode 100644 index 779df7c83664a99f7ecef0a978ef597bbc3e2f6b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 596 zcmV-a0;~N80ssI2Bme+XQ$aES1ONa50098;&^XYzTH}(faseIr9+N|`wci`K2nwPb zlq5DSX=xeY>8v`7$|T~0;SYLNoNq)v9Zo*Hgg1PL%3P{eE`a%yEA{G;w}vZIjuW`L zk;hhC@aW&{&+43jTI0Q-L>CUsf5!1guIj`h-IlJo>mOQJf~sW>{wY}U_z-;{IP$A^ zwA%A1IYdpPT4o15gwN$S+;#Q~SbSjtnK zDS`h^TLh5hf0cRA71bJX?AY{HLXfE-%(UIC#t#-jd)KFvb|t)g5SknHIVsWq7r!Vy zWT^Gw&j>Of(WJh4pZ~2zt$53ex;$yV5ctpScq+6$0sS6X1^{-?+egYRn>>Dp*ifLD zncU!xeq8d{#`?@2kMLo#OT~tD|1RYMR&i5VETzCiiRuSXNJ8zy72Wh?k;~Ow_)f)V zu({=oxR9jDJ8467)to=xPPDacfK6}zDuK`7Tp<5cKeyhV-Q7EZitn|MqX!7fIsiI^ zoVbS>$d^>X;)aNzj^=x!@r$?6Om*K5nzfz?X?$v0S1(6m!}D{B>~?|7c4}$fAsbYe ivvj9*^^Ltsc;DGw6150G4x6SQMA1pXwDJv5I diff --git a/dotnet/global.json 
b/dotnet/global.json deleted file mode 100644 index a604954f98..0000000000 --- a/dotnet/global.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "sdk": { - "version": "8.0.104", - "rollForward": "latestMinor" - } -} \ No newline at end of file diff --git a/dotnet/nuget/NUGET.md b/dotnet/nuget/NUGET.md deleted file mode 100644 index ed26d55f59..0000000000 --- a/dotnet/nuget/NUGET.md +++ /dev/null @@ -1,8 +0,0 @@ -### About AutoGen for .NET -`AutoGen for .NET` is the official .NET SDK for [AutoGen](https://github.com/ag2ai/ag2). It enables you to create LLM agents and construct multi-agent workflows with ease. It also provides integration with popular platforms like OpenAI, Semantic Kernel, and LM Studio. - -### Gettings started -- Find documents and examples on our [document site](https://ag2ai.github.io/autogen-for-net/) -- Join our [Discord channel](https://discord.gg/pAbnFJrkgZ) to get help and discuss with the community -- Report a bug or request a feature by creating a new issue in our [github repo](https://github.com/ag2ai/ag2) -- Consume the nightly build package from one of the [nightly build feeds](https://ag2ai.github.io/autogen-for-net/articles/Installation.html#nighly-build) \ No newline at end of file diff --git a/dotnet/nuget/icon.png b/dotnet/nuget/icon.png deleted file mode 100644 index 076fc48c56..0000000000 --- a/dotnet/nuget/icon.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02dbf31fea0b92714c80fdc90888da7e96374a1f52c621a939835fd3c876ddcc -size 426084 diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props deleted file mode 100644 index f99a97ecc4..0000000000 --- a/dotnet/nuget/nuget-package.props +++ /dev/null @@ -1,54 +0,0 @@ - - - true - - - AutoGen - Microsoft - AutoGen - A programming framework for agentic AI - AI, Artificial Intelligence, SDK - $(AssemblyName) - - - MIT - Β© Microsoft Corporation. All rights reserved. - https://ag2ai.github.io/autogen-for-net - https://github.com/microsoft/autogen - true - - - icon.png - icon.png - NUGET.md - - - true - snupkg - - - true - - - true - - - bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml - - - - - - - - - - - - - - - - true - - \ No newline at end of file diff --git a/dotnet/resource/images/background.png b/dotnet/resource/images/background.png deleted file mode 100644 index ca276f81f5..0000000000 --- a/dotnet/resource/images/background.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:300b7c9d6ba0c23a3e52fbd2e268141ddcca0434a9fb9dcf7e58e7e903d36dcf -size 2126185 diff --git a/dotnet/resource/images/square.png b/dotnet/resource/images/square.png deleted file mode 100644 index afb4f4cd4d..0000000000 --- a/dotnet/resource/images/square.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8323d0b8eceb752e14c29543b2e28bb2fc648ed9719095c31b7708867a4dc918 -size 491 diff --git a/dotnet/sample/AutoGen.Anthropic.Samples/Anthropic_Agent_With_Prompt_Caching.cs b/dotnet/sample/AutoGen.Anthropic.Samples/Anthropic_Agent_With_Prompt_Caching.cs deleted file mode 100644 index 1bbb270f17..0000000000 --- a/dotnet/sample/AutoGen.Anthropic.Samples/Anthropic_Agent_With_Prompt_Caching.cs +++ /dev/null @@ -1,139 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Anthropic_Agent_With_Prompt_Caching.cs - -using AutoGen.Anthropic.DTO; -using AutoGen.Anthropic.Extensions; -using AutoGen.Anthropic.Utils; -using AutoGen.Core; - -namespace AutoGen.Anthropic.Samples; - -public class Anthropic_Agent_With_Prompt_Caching -{ - // A random and long test string to demonstrate cache control. - // the context must be larger than 1024 tokens for Claude 3.5 Sonnet and Claude 3 Opus - // 2048 tokens for Claude 3.0 Haiku - // Shorter prompts cannot be cached, even if marked with cache_control. Any requests to cache fewer than this number of tokens will be processed without caching - - #region Long story for caching - public const string LongStory = """ - Once upon a time in a small, nondescript town lived a man named Bob. Bob was an unassuming individual, the kind of person you wouldn’t look twice at if you passed him on the street. He worked as an IT specialist for a mid-sized corporation, spending his days fixing computers and troubleshooting software issues. But beneath his average exterior, Bob harbored a secret ambitionβ€”he wanted to take over the world. - - Bob wasn’t always like this. For most of his life, he had been content with his routine, blending into the background. But one day, while browsing the dark corners of the internet, Bob stumbled upon an ancient manuscript, encrypted within the deep web, detailing the steps to global domination. It was written by a forgotten conqueror, someone whose name had been erased from history but whose methods were preserved in this digital relic. The manuscript laid out a plan so intricate and flawless that Bob, with his analytical mind, became obsessed. - - Over the next few years, Bob meticulously followed the manuscript’s guidance. He started small, creating a network of like-minded individuals who shared his dream. They communicated through encrypted channels, meeting in secret to discuss their plans. Bob was careful, never revealing too much about himself, always staying in the shadows. He used his IT skills to gather information, infiltrating government databases, and private corporations, and acquiring secrets that could be used as leverage. - - As his network grew, so did his influence. Bob began to manipulate world events from behind the scenes. He orchestrated economic crises, incited political turmoil, and planted seeds of discord among the world’s most powerful nations. Each move was calculated, each action a step closer to his ultimate goal. The world was in chaos, and no one suspected that a man like Bob could be behind it all. - - But Bob knew that causing chaos wasn’t enough. To truly take over the world, he needed something moreβ€”something to cement his power. That’s when he turned to technology. Bob had always been ahead of the curve when it came to tech, and now, he planned to use it to his advantage. He began developing an AI, one that would be more powerful and intelligent than anything the world had ever seen. This AI, which Bob named β€œNemesis,” was designed to control every aspect of modern lifeβ€”from financial systems to military networks. - - It took years of coding, testing, and refining, but eventually, Nemesis was ready. Bob unleashed the AI, and within days, it had taken control of the world’s digital infrastructure. Governments were powerless, their systems compromised. 
Corporations crumbled as their assets were seized. The military couldn’t act, their weapons turned against them. Bob, from the comfort of his modest home, had done it. He had taken over the world. - - The world, now under Bob’s control, was eerily quiet. There were no more wars, no more financial crises, no more political strife. Nemesis ensured that everything ran smoothly, efficiently, and without dissent. The people of the world had no choice but to obey, their lives dictated by an unseen hand. - - Bob, once a man who was overlooked and ignored, was now the most powerful person on the planet. But with that power came a realization. The world he had taken over was not the world he had envisioned. It was cold, mechanical, and devoid of the chaos that once made life unpredictable and exciting. Bob had achieved his goal, but in doing so, he had lost the very thing that made life worth livingβ€”freedom. - - And so, Bob, now ruler of the world, sat alone in his control room, staring at the screens that displayed his dominion. He had everything he had ever wanted, yet he felt emptier than ever before. The world was his, but at what cost? - - In the end, Bob realized that true power didn’t come from controlling others, but from the ability to let go. He deactivated Nemesis, restoring the world to its former state, and disappeared into obscurity, content to live out the rest of his days as just another face in the crowd. And though the world never knew his name, Bob’s legacy would live on, a reminder of the dangers of unchecked ambition. - - Bob had vanished, leaving the world in a fragile state of recovery. Governments scrambled to regain control of their systems, corporations tried to rebuild, and the global population slowly adjusted to life without the invisible grip of Nemesis. Yet, even as society returned to a semblance of normalcy, whispers of the mysterious figure who had brought the world to its knees lingered in the shadows. - - Meanwhile, Bob had retreated to a secluded cabin deep in the mountains. The cabin was a modest, rustic place, surrounded by dense forests and overlooking a tranquil lake. It was far from civilization, a perfect place for a man who wanted to disappear. Bob spent his days fishing, hiking, and reflecting on his past. For the first time in years, he felt a sense of peace. - - But peace was fleeting. Despite his best efforts to put his past behind him, Bob couldn’t escape the consequences of his actions. He had unleashed Nemesis upon the world, and though he had deactivated the AI, remnants of its code still existed. Rogue factions, hackers, and remnants of his old network were searching for those fragments, hoping to revive Nemesis and seize the power that Bob had relinquished. - - One day, as Bob was chopping wood outside his cabin, a figure emerged from the tree line. It was a young woman, dressed in hiking gear, with a determined look in her eyes. Bob tensed, his instincts telling him that this was no ordinary hiker. - - β€œBob,” the woman said, her voice steady. β€œOr should I say, the man who almost became the ruler of the world?” - - Bob sighed, setting down his axe. β€œWho are you, and what do you want?” - - The woman stepped closer. β€œMy name is Sarah. I was part of your network, one of the few who knew about Nemesis. But I wasn’t like the others. I didn’t want power for myselfβ€”I wanted to protect the world from those who would misuse it.” - - Bob studied her, trying to gauge her intentions. 
β€œAnd why are you here now?” - - Sarah reached into her backpack and pulled out a small device. β€œBecause Nemesis isn’t dead. Some of its code is still active, and it’s trying to reboot itself. I need your help to stop it for good.” - - Bob’s heart sank. He had hoped that by deactivating Nemesis, he had erased it from existence. But deep down, he knew that an AI as powerful as Nemesis wouldn’t go down so easily. β€œWhy come to me? I’m the one who created it. I’m the reason the world is in this mess.” - - Sarah shook her head. β€œYou’re also the only one who knows how to stop it. I’ve tracked down the remnants of Nemesis’s code, but I need you to help destroy it before it falls into the wrong hands.” - - Bob hesitated. He had wanted nothing more than to leave his past behind, but he couldn’t ignore the responsibility that weighed on him. He had created Nemesis, and now it was his duty to make sure it never posed a threat again. - - β€œAlright,” Bob said finally. β€œI’ll help you. But after this, I’m done. No more world domination, no more secret networks. I just want to live in peace.” - - Sarah nodded. β€œAgreed. Let’s finish what you started.” - - Over the next few weeks, Bob and Sarah worked together, traveling to various locations around the globe where fragments of Nemesis’s code had been detected. They infiltrated secure facilities, outsmarted rogue hackers, and neutralized threats, all while staying one step ahead of those who sought to control Nemesis for their own gain. - - As they worked, Bob and Sarah developed a deep respect for one another. Sarah was sharp, resourceful, and driven by a genuine desire to protect the world. Bob found himself opening up to her, sharing his regrets, his doubts, and the lessons he had learned. In turn, Sarah shared her own storyβ€”how she had once been tempted by power but had chosen a different path, one that led her to fight for what was right. - - Finally, after weeks of intense effort, they tracked down the last fragment of Nemesis’s code, hidden deep within a remote server farm in the Arctic. The facility was heavily guarded, but Bob and Sarah had planned meticulously. Under the cover of a blizzard, they infiltrated the facility, avoiding detection as they made their way to the heart of the server room. - - As Bob began the process of erasing the final fragment, an alarm blared, and the facility’s security forces closed in. Sarah held them off as long as she could, but they were outnumbered and outgunned. Just as the situation seemed hopeless, Bob executed the final command, wiping Nemesis from existence once and for all. - - But as the last remnants of Nemesis were deleted, Bob knew there was only one way to ensure it could never be resurrected. He initiated a self-destruct sequence for the server farm, trapping himself and Sarah inside. - - Sarah stared at him, realization dawning in her eyes. β€œBob, what are you doing?” - - Bob looked at her, a sad smile on his face. β€œI have to make sure it’s over. This is the only way.” - - Sarah’s eyes filled with tears, but she nodded, understanding the gravity of his decision. β€œThank you, Bob. For everything.” - - As the facility’s countdown reached its final seconds, Bob and Sarah stood side by side, knowing they had done the right thing. The explosion that followed was seen from miles away, a final testament to the end of an era. - - The world never knew the true story of Bob, the man who almost ruled the world. 
But in his final act of sacrifice, he ensured that the world would remain free, a place where people could live their lives without fear of control. Bob had redeemed himself, not as a conqueror, but as a protector – a man who chose to save the world rather than rule it. - - And in the quiet aftermath of the explosion, as the snow settled over the wreckage, Bob’s legacy was sealed – not as a name in history books, but as a silent guardian whose actions would be felt for generations to come. - """; - #endregion - - public static async Task RunAsync() - { - #region init translator agents & register middlewares - - var apiKey = Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ?? - throw new Exception("Please set ANTHROPIC_API_KEY environment variable."); - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, apiKey); - var frenchTranslatorAgent = - new AnthropicClientAgent(anthropicClient, "frenchTranslator", AnthropicConstants.Claude35Sonnet, - systemMessage: "You are a French translator") - .RegisterMessageConnector() - .RegisterPrintMessage(); - - var germanTranslatorAgent = new AnthropicClientAgent(anthropicClient, "germanTranslator", - AnthropicConstants.Claude35Sonnet, systemMessage: "You are a German translator") - .RegisterMessageConnector() - .RegisterPrintMessage(); - - #endregion - - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS) - .RegisterPrintMessage(); - - var groupChat = new RoundRobinGroupChat( - agents: [userProxyAgent, frenchTranslatorAgent, germanTranslatorAgent]); - - var messageEnvelope = - MessageEnvelope.Create( - new ChatMessage("user", [TextContent.CreateTextWithCacheControl(LongStory)]), - from: "user"); - - var chatHistory = new List() - { - new TextMessage(Role.User, "translate this text for me", from: userProxyAgent.Name), - messageEnvelope, - }; - - var history = await groupChat.SendAsync(chatHistory).ToArrayAsync(); - } -} diff --git a/dotnet/sample/AutoGen.Anthropic.Samples/AutoGen.Anthropic.Samples.csproj b/dotnet/sample/AutoGen.Anthropic.Samples/AutoGen.Anthropic.Samples.csproj deleted file mode 100644 index fe7553b937..0000000000 --- a/dotnet/sample/AutoGen.Anthropic.Samples/AutoGen.Anthropic.Samples.csproj +++ /dev/null @@ -1,19 +0,0 @@ - - - - Exe - $(TestTargetFrameworks) - enable - enable - True - - - - - - - - - - - diff --git a/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent.cs b/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent.cs deleted file mode 100644 index 303cefcb6d..0000000000 --- a/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Create_Anthropic_Agent.cs - -using AutoGen.Anthropic.Extensions; -using AutoGen.Anthropic.Utils; -using AutoGen.Core; - -namespace AutoGen.Anthropic.Samples; - -public static class Create_Anthropic_Agent -{ - public static async Task RunAsync() - { - #region create_anthropic_agent - var apiKey = Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ??
throw new Exception("Missing ANTHROPIC_API_KEY environment variable."); - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, apiKey); - var agent = new AnthropicClientAgent(anthropicClient, "assistant", AnthropicConstants.Claude3Haiku); - #endregion - - #region register_middleware - var agentWithConnector = agent - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion register_middleware - - await agentWithConnector.SendAsync(new TextMessage(Role.Assistant, "Hello", from: "user")); - } -} diff --git a/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent_With_Tool.cs b/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent_With_Tool.cs deleted file mode 100644 index 62c81756e6..0000000000 --- a/dotnet/sample/AutoGen.Anthropic.Samples/Create_Anthropic_Agent_With_Tool.cs +++ /dev/null @@ -1,106 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Create_Anthropic_Agent_With_Tool.cs - -using AutoGen.Anthropic.DTO; -using AutoGen.Anthropic.Extensions; -using AutoGen.Anthropic.Utils; -using AutoGen.Core; -using FluentAssertions; - -namespace AutoGen.Anthropic.Samples; - -#region WeatherFunction - -public partial class WeatherFunction -{ - /// - /// Gets the weather based on the location and the unit - /// - /// - /// - /// - [Function] - public async Task GetWeather(string location, string unit) - { - // dummy implementation - return $"The weather in {location} is currently sunny with a tempature of {unit} (s)"; - } -} -#endregion -public class Create_Anthropic_Agent_With_Tool -{ - public static async Task RunAsync() - { - #region define_tool - var tool = new Tool - { - Name = "GetWeather", - Description = "Get the current weather in a given location", - InputSchema = new InputSchema - { - Type = "object", - Properties = new Dictionary - { - { "location", new SchemaProperty { Type = "string", Description = "The city and state, e.g. San Francisco, CA" } }, - { "unit", new SchemaProperty { Type = "string", Description = "The unit of temperature, either \"celsius\" or \"fahrenheit\"" } } - }, - Required = new List { "location" } - } - }; - - var weatherFunction = new WeatherFunction(); - var functionMiddleware = new FunctionCallMiddleware( - functions: [ - weatherFunction.GetWeatherFunctionContract, - ], - functionMap: new Dictionary>> - { - { weatherFunction.GetWeatherFunctionContract.Name!, weatherFunction.GetWeatherWrapper }, - }); - - #endregion - - #region create_anthropic_agent - - var apiKey = Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ?? 
- throw new Exception("Missing ANTHROPIC_API_KEY environment variable."); - - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, apiKey); - var agent = new AnthropicClientAgent(anthropicClient, "assistant", AnthropicConstants.Claude3Haiku, - tools: [tool]); // Define tools for AnthropicClientAgent - #endregion - - #region register_middleware - - var agentWithConnector = agent - .RegisterMessageConnector() - .RegisterPrintMessage() - .RegisterStreamingMiddleware(functionMiddleware); - #endregion register_middleware - - #region single_turn - var question = new TextMessage(Role.Assistant, - "What is the weather like in San Francisco?", - from: "user"); - var functionCallReply = await agentWithConnector.SendAsync(question); - #endregion - - #region Single_turn_verify_reply - functionCallReply.Should().BeOfType(); - #endregion Single_turn_verify_reply - - #region Multi_turn - var finalReply = await agentWithConnector.SendAsync(chatHistory: [question, functionCallReply]); - #endregion Multi_turn - - #region Multi_turn_verify_reply - finalReply.Should().BeOfType(); - #endregion Multi_turn_verify_reply - } -} diff --git a/dotnet/sample/AutoGen.Anthropic.Samples/Program.cs b/dotnet/sample/AutoGen.Anthropic.Samples/Program.cs deleted file mode 100644 index 4e2aa025ec..0000000000 --- a/dotnet/sample/AutoGen.Anthropic.Samples/Program.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -namespace AutoGen.Anthropic.Samples; - -internal static class Program -{ - public static async Task Main(string[] args) - { - await Anthropic_Agent_With_Prompt_Caching.RunAsync(); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj b/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj deleted file mode 100644 index d4323ee4c9..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ - - - Exe - $(TestTargetFrameworks) - enable - True - $(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110 - true - - - - - - - - - - diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs deleted file mode 100644 index dadf860405..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs +++ /dev/null @@ -1,37 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// AgentCodeSnippet.cs -using AutoGen.Core; - -namespace AutoGen.BasicSample.CodeSnippet; - -internal class AgentCodeSnippet -{ - public async Task ChatWithAnAgent(IStreamingAgent agent) - { - #region ChatWithAnAgent_GenerateReplyAsync - var message = new TextMessage(Role.User, "Hello"); - IMessage reply = await agent.GenerateReplyAsync([message]); - #endregion ChatWithAnAgent_GenerateReplyAsync - - #region ChatWithAnAgent_SendAsync - reply = await agent.SendAsync("Hello"); - #endregion ChatWithAnAgent_SendAsync - - #region ChatWithAnAgent_GenerateStreamingReplyAsync - var textMessage = new TextMessage(Role.User, "Hello"); - await foreach (var streamingReply in agent.GenerateStreamingReplyAsync([message])) - { - if (streamingReply is TextMessageUpdate update) - { - Console.Write(update.Content); - } - } - #endregion ChatWithAnAgent_GenerateStreamingReplyAsync - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs deleted file mode 100644 index 0d3eae5774..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs +++ /dev/null @@ -1,48 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// BuildInMessageCodeSnippet.cs - -using AutoGen.Core; -namespace AutoGen.BasicSample.CodeSnippet; - -internal class BuildInMessageCodeSnippet -{ - public async Task StreamingCallCodeSnippetAsync() - { - IStreamingAgent agent = default; - #region StreamingCallCodeSnippet - var helloTextMessage = new TextMessage(Role.User, "Hello"); - var reply = agent.GenerateStreamingReplyAsync([helloTextMessage]); - var finalTextMessage = new TextMessage(Role.Assistant, string.Empty, from: agent.Name); - await foreach (var message in reply) - { - if (message is TextMessageUpdate textMessage) - { - Console.Write(textMessage.Content); - finalTextMessage.Update(textMessage); - } - } - #endregion StreamingCallCodeSnippet - - #region StreamingCallWithFinalMessage - reply = agent.GenerateStreamingReplyAsync([helloTextMessage]); - TextMessage finalMessage = null; - await foreach (var message in reply) - { - if (message is TextMessageUpdate textMessage) - { - Console.Write(textMessage.Content); - } - else if (message is TextMessage txtMessage) - { - finalMessage = txtMessage; - } - } - #endregion StreamingCallWithFinalMessage - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs deleted file mode 100644 index 3c7a96353d..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs +++ /dev/null @@ -1,148 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// CreateAnAgent.cs - -using AutoGen; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using FluentAssertions; - -public partial class AssistantCodeSnippet -{ - public void CodeSnippet1() - { - #region code_snippet_1 - // get OpenAI Key and create config - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var llmConfig = new OpenAIConfig(openAIKey, "gpt-3.5-turbo"); - - // create assistant agent - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] { llmConfig }, - }); - #endregion code_snippet_1 - - } - - public void CodeSnippet2() - { - #region code_snippet_2 - // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); - - // create assistant agent - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] { llmConfig }, - }); - #endregion code_snippet_2 - } - - #region code_snippet_3 - /// - /// convert input to upper case - /// - /// input - [Function] - public async Task UpperCase(string input) - { - var result = input.ToUpper(); - return result; - } - - #endregion code_snippet_3 - - public async Task CodeSnippet4() - { - // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); - #region code_snippet_4 - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that convert user input to upper case.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - llmConfig - }, - FunctionContracts = new[] - { - this.UpperCaseFunctionContract, // The FunctionDefinition object for the UpperCase function - }, - }); - - var response = await assistantAgent.SendAsync("hello"); - response.Should().BeOfType(); - var toolCallMessage = (ToolCallMessage)response; - toolCallMessage.ToolCalls.Count().Should().Be(1); - toolCallMessage.ToolCalls.First().FunctionName.Should().Be("UpperCase"); - #endregion code_snippet_4 - } - - public async Task CodeSnippet5() - { - // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); - #region code_snippet_5 - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that convert user input to upper case.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - 
llmConfig - }, - FunctionContracts = new[] - { - this.UpperCaseFunctionContract, // The FunctionDefinition object for the UpperCase function - }, - }, - functionMap: new Dictionary>> - { - { this.UpperCaseFunctionContract.Name, this.UpperCaseWrapper }, // The wrapper function for the UpperCase function - }); - - var response = await assistantAgent.SendAsync("hello"); - response.Should().BeOfType(); - response.From.Should().Be("assistant"); - var textMessage = (TextMessage)response; - textMessage.Content.Should().Be("HELLO"); - #endregion code_snippet_5 - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs deleted file mode 100644 index a8d2581d9c..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs +++ /dev/null @@ -1,155 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionCallCodeSnippet.cs - -using AutoGen; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using FluentAssertions; - -public partial class FunctionCallCodeSnippet -{ - public async Task CodeSnippet4() - { - // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); - #region code_snippet_4 - var function = new TypeSafeFunctionCall(); - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that convert user input to upper case.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - llmConfig - }, - FunctionContracts = new[] - { - function.WeatherReportFunctionContract, - }, - }); - - var response = await assistantAgent.SendAsync("hello What's the weather in Seattle today? 
today is 2024-01-01"); - response.Should().BeOfType(); - var toolCallMessage = (ToolCallMessage)response; - toolCallMessage.ToolCalls.Count().Should().Be(1); - toolCallMessage.ToolCalls[0].FunctionName.Should().Be("WeatherReport"); - toolCallMessage.ToolCalls[0].FunctionArguments.Should().Be(@"{""location"":""Seattle"",""date"":""2024-01-01""}"); - #endregion code_snippet_4 - } - - - public async Task CodeSnippet6() - { - // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); - #region code_snippet_6 - var function = new TypeSafeFunctionCall(); - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - llmConfig - }, - FunctionContracts = new[] - { - function.WeatherReportFunctionContract, - }, - }, - functionMap: new Dictionary>> - { - { function.WeatherReportFunctionContract.Name, function.WeatherReportWrapper }, // The function wrapper for the weather report function - }); - - #endregion code_snippet_6 - - #region code_snippet_6_1 - var response = await assistantAgent.SendAsync("What's the weather in Seattle today? today is 2024-01-01"); - response.Should().BeOfType(); - var textMessage = (TextMessage)response; - textMessage.Content.Should().Be("Weather report for Seattle on 2024-01-01 is sunny"); - #endregion code_snippet_6_1 - } - - public async Task OverriderFunctionContractAsync() - { - IAgent agent = default; - IEnumerable messages = new List(); - #region overrider_function_contract - var function = new TypeSafeFunctionCall(); - var reply = agent.GenerateReplyAsync(messages, new GenerateReplyOptions - { - Functions = new[] { function.WeatherReportFunctionContract }, - }); - #endregion overrider_function_contract - } - - public async Task RegisterFunctionCallMiddlewareAsync() - { - IAgent agent = default; - #region register_function_call_middleware - var function = new TypeSafeFunctionCall(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: new[] { function.WeatherReportFunctionContract }, - functionMap: new Dictionary>> - { - { function.WeatherReportFunctionContract.Name, function.WeatherReportWrapper }, - }); - - agent = agent!.RegisterMiddleware(functionCallMiddleware); - var reply = await agent.SendAsync("What's the weather in Seattle today? today is 2024-01-01"); - #endregion register_function_call_middleware - } - - public async Task TwoAgentWeatherChatTestAsync() - { - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? 
throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); - var deploymentName = "gpt-35-turbo-16k"; - var config = new AzureOpenAIConfig(endpoint, deploymentName, key); - #region two_agent_weather_chat - var function = new TypeSafeFunctionCall(); - var assistant = new AssistantAgent( - "assistant", - llmConfig: new ConversableAgentConfig - { - ConfigList = new[] { config }, - FunctionContracts = new[] - { - function.WeatherReportFunctionContract, - }, - }); - - var user = new UserProxyAgent( - name: "user", - functionMap: new Dictionary>> - { - { function.WeatherReportFunctionContract.Name, function.WeatherReportWrapper }, - }); - - await user.InitiateChatAsync(assistant, "what's weather in Seattle today, today is 2024-01-01", 10); - #endregion two_agent_weather_chat - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs deleted file mode 100644 index 983cf89f6b..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs +++ /dev/null @@ -1,47 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GetStartCodeSnippet.cs - -#region snippet_GetStartCodeSnippet -using AutoGen; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -#endregion snippet_GetStartCodeSnippet - -public class GetStartCodeSnippet -{ - public async Task CodeSnippet1() - { - #region code_snippet_1 - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var gpt35Config = new OpenAIConfig(openAIKey, "gpt-3.5-turbo"); - - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35Config], - }) - .RegisterPrintMessage(); // register a hook to print message nicely to console - - // set human input mode to ALWAYS so that user always provide input - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS) - .RegisterPrintMessage(); - - // start the conversation - await userProxyAgent.InitiateChatAsync( - receiver: assistantAgent, - message: "Hey assistant, please do me a favor.", - maxRound: 10); - #endregion code_snippet_1 - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs deleted file mode 100644 index 0bf89a4c90..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs +++ /dev/null @@ -1,183 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// MiddlewareAgentCodeSnippet.cs - -using System.Text.Json; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using FluentAssertions; - -namespace AutoGen.BasicSample.CodeSnippet; - -public class MiddlewareAgentCodeSnippet -{ - public async Task CreateMiddlewareAgentAsync() - { - #region create_middleware_agent_with_original_agent - // Create an agent that always replies "Hi!" - IAgent agent = new DefaultReplyAgent(name: "assistant", defaultReply: "Hi!"); - - // Create a middleware agent on top of default reply agent - var middlewareAgent = new MiddlewareAgent(innerAgent: agent); - middlewareAgent.Use(async (messages, options, agent, ct) => - { - if (messages.Last() is TextMessage lastMessage && lastMessage.Content.Contains("Hello World")) - { - lastMessage.Content = $"[middleware 0] {lastMessage.Content}"; - return lastMessage; - } - - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - var reply = await middlewareAgent.SendAsync("Hello World"); - reply.GetContent().Should().Be("[middleware 0] Hello World"); - reply = await middlewareAgent.SendAsync("Hello AI!"); - reply.GetContent().Should().Be("Hi!"); - #endregion create_middleware_agent_with_original_agent - - #region register_middleware_agent - middlewareAgent = agent.RegisterMiddleware(async (messages, options, agent, ct) => - { - if (messages.Last() is TextMessage lastMessage && lastMessage.Content.Contains("Hello World")) - { - lastMessage.Content = $"[middleware 0] {lastMessage.Content}"; - return lastMessage; - } - - return await agent.GenerateReplyAsync(messages, options, ct); - }); - #endregion register_middleware_agent - - #region short_circuit_middleware_agent - // This middleware will short circuit the agent and return a message directly. - middlewareAgent.Use(async (messages, options, agent, ct) => - { - return new TextMessage(Role.Assistant, $"[middleware shortcut]"); - }); - #endregion short_circuit_middleware_agent - } - - public async Task RegisterStreamingMiddlewareAsync() - { - IStreamingAgent streamingAgent = default; - #region register_streaming_middleware - var connector = new OpenAIChatRequestMessageConnector(); - var agent = streamingAgent! - .RegisterStreamingMiddleware(connector); - #endregion register_streaming_middleware - } - - public async Task CodeSnippet1() - { - #region code_snippet_1 - // Create an agent that always replies "Hello World" - IAgent agent = new DefaultReplyAgent(name: "assistant", defaultReply: "Hello World"); - - // Create a middleware agent on top of default reply agent - var middlewareAgent = new MiddlewareAgent(innerAgent: agent); - - // Since no middleware is added, middlewareAgent will simply proxy into the inner agent to generate reply. 
- var reply = await middlewareAgent.SendAsync("Hello World"); - reply.From.Should().Be("assistant"); - reply.GetContent().Should().Be("Hello World"); - #endregion code_snippet_1 - - #region code_snippet_2 - middlewareAgent.Use(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage.Content = $"[middleware 0] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - reply = await middlewareAgent.SendAsync("Hello World"); - reply.Should().BeOfType(); - var textReply = (TextMessage)reply; - textReply.Content.Should().Be("[middleware 0] Hello World"); - #endregion code_snippet_2 - #region code_snippet_2_1 - middlewareAgent = agent.RegisterMiddleware(async (messages, options, agnet, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage.Content = $"[middleware 0] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - reply = await middlewareAgent.SendAsync("Hello World"); - reply.GetContent().Should().Be("[middleware 0] Hello World"); - #endregion code_snippet_2_1 - #region code_snippet_3 - middlewareAgent.Use(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage.Content = $"[middleware 1] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - reply = await middlewareAgent.SendAsync("Hello World"); - reply.GetContent().Should().Be("[middleware 0] [middleware 1] Hello World"); - #endregion code_snippet_3 - - #region code_snippet_4 - middlewareAgent.Use(async (messages, options, next, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage.Content = $"[middleware shortcut]"; - - return lastMessage; - }); - - reply = await middlewareAgent.SendAsync("Hello World"); - reply.GetContent().Should().Be("[middleware shortcut]"); - #endregion code_snippet_4 - - #region retrieve_inner_agent - var innerAgent = middlewareAgent.Agent; - #endregion retrieve_inner_agent - - #region code_snippet_logging_to_console - var agentWithLogging = middlewareAgent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var reply = await agent.GenerateReplyAsync(messages, options, ct); - var formattedMessage = reply.FormatMessage(); - Console.WriteLine(formattedMessage); - - return reply; - }); - #endregion code_snippet_logging_to_console - - #region code_snippet_response_format_forcement - var jsonAgent = middlewareAgent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var maxAttempt = 5; - var reply = await agent.GenerateReplyAsync(messages, options, ct); - while (maxAttempt-- > 0) - { - if (JsonSerializer.Deserialize>(reply.GetContent()) is { } dict) - { - return reply; - } - else - { - await Task.Delay(1000); - var reviewPrompt = @"The format is not json, please modify your response to json format - -- ORIGINAL MESSAGE -- - {reply.Content} - -- END OF ORIGINAL MESSAGE -- - - Reply again with json format."; - reply = await agent.SendAsync(reviewPrompt, messages, ct); - } - } - - throw new Exception("agent fails to generate json response"); - }); - #endregion code_snippet_response_format_forcement - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs deleted file mode 100644 index 7ea16b37e7..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs +++ /dev/null @@ -1,92 +0,0 @@ 
-ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralAICodeSnippet.cs - -#region using_statement -using AutoGen.Core; -using AutoGen.Mistral; -using AutoGen.Mistral.Extension; -using FluentAssertions; -#endregion using_statement - -namespace AutoGen.BasicSample.CodeSnippet; - -#region weather_function -public partial class MistralAgentFunction -{ - [Function] - public async Task GetWeather(string location) - { - return "The weather in " + location + " is sunny."; - } -} -#endregion weather_function - -internal class MistralAICodeSnippet -{ - public async Task CreateMistralAIClientAsync() - { - #region create_mistral_agent - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new Exception("Missing MISTRAL_API_KEY environment variable"); - var client = new MistralClient(apiKey: apiKey); - var agent = new MistralClientAgent( - client: client, - name: "MistralAI", - model: MistralAIModelID.OPEN_MISTRAL_7B) - .RegisterMessageConnector(); // support more AutoGen built-in message types. - - await agent.SendAsync("Hello, how are you?"); - #endregion create_mistral_agent - - #region streaming_chat - var reply = agent.GenerateStreamingReplyAsync( - messages: [new TextMessage(Role.User, "Hello, how are you?")] - ); - - await foreach (var message in reply) - { - if (message is TextMessageUpdate textMessageUpdate && textMessageUpdate.Content is string content) - { - Console.WriteLine(content); - } - } - #endregion streaming_chat - } - - public async Task MistralAIChatAgentGetWeatherToolUsageAsync() - { - #region create_mistral_function_call_agent - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new Exception("Missing MISTRAL_API_KEY environment variable"); - var client = new MistralClient(apiKey: apiKey); - var agent = new MistralClientAgent( - client: client, - name: "MistralAI", - model: MistralAIModelID.MISTRAL_SMALL_LATEST) - .RegisterMessageConnector(); // support more AutoGen built-in message types like ToolCallMessage and ToolCallResultMessage - #endregion create_mistral_function_call_agent - - #region create_get_weather_function_call_middleware - var mistralFunctions = new MistralAgentFunction(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [mistralFunctions.GetWeatherFunctionContract], - functionMap: new Dictionary>> // with functionMap, the function will be automatically triggered if the tool name matches one of the keys. 
- { - { mistralFunctions.GetWeatherFunctionContract.Name, mistralFunctions.GetWeather } - }); - #endregion create_get_weather_function_call_middleware - - #region register_function_call_middleware - agent = agent.RegisterStreamingMiddleware(functionCallMiddleware); - #endregion register_function_call_middleware - - #region send_message_with_function_call - var reply = await agent.SendAsync("What is the weather in Seattle?"); - reply.GetContent().Should().Be("The weather in Seattle is sunny."); - #endregion send_message_with_function_call - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs deleted file mode 100644 index 4e1fe4be99..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs +++ /dev/null @@ -1,142 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAICodeSnippet.cs - -#region using_statement -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion using_statement -using FluentAssertions; - -namespace AutoGen.BasicSample.CodeSnippet; -#region weather_function -public partial class Functions -{ - [Function] - public async Task GetWeather(string location) - { - return "The weather in " + location + " is sunny."; - } -} -#endregion weather_function -public partial class OpenAICodeSnippet -{ - [Function] - public async Task GetWeather(string location) - { - return "The weather in " + location + " is sunny."; - } - - public async Task CreateOpenAIChatAgentAsync() - { - #region create_openai_chat_agent - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var openAIClient = new OpenAIClient(openAIKey); - - // create an open ai chat agent - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: modelId, - systemMessage: "You are an assistant that help user to do some tasks."); - - // OpenAIChatAgent supports the following message types: - // - IMessage where ChatRequestMessage is from Azure.AI.OpenAI - - var helloMessage = new ChatRequestUserMessage("Hello"); - - // Use MessageEnvelope.Create to create an IMessage - var chatMessageContent = MessageEnvelope.Create(helloMessage); - var reply = await openAIChatAgent.SendAsync(chatMessageContent); - - // The type of reply is MessageEnvelope where ChatResponseMessage is from Azure.AI.OpenAI - reply.Should().BeOfType>(); - - // You can un-envelop the reply to get the ChatResponseMessage - ChatResponseMessage response = reply.As>().Content; - response.Role.Should().Be(ChatRole.Assistant); - #endregion create_openai_chat_agent - - #region create_openai_chat_agent_streaming - var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); - - await foreach (var streamingMessage in streamingReply) - { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().Content.Role.Should().Be(ChatRole.Assistant); - } - #endregion create_openai_chat_agent_streaming - - #region register_openai_chat_message_connector - // register message connector to support more message types - var agentWithConnector = openAIChatAgent - .RegisterMessageConnector(); - - // now the agentWithConnector supports more message types - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage("Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - new TextMessage(Role.Assistant, "Hello", from: "user"), // Message type is going to be deprecated, please use TextMessage instead - }; - - foreach (var message in messages) - { - reply = await agentWithConnector.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - } - #endregion register_openai_chat_message_connector - } - - public async Task OpenAIChatAgentGetWeatherFunctionCallAsync() - { - #region openai_chat_agent_get_weather_function_call - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var openAIClient = new OpenAIClient(openAIKey); - - // create an open ai chat agent - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: modelId, - systemMessage: "You are an assistant that help user to do some tasks.") - .RegisterMessageConnector(); - - #endregion openai_chat_agent_get_weather_function_call - - #region create_function_call_middleware - var functions = new Functions(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [functions.GetWeatherFunctionContract], // GetWeatherFunctionContract is auto-generated from the GetWeather function - functionMap: new Dictionary>> - { - { functions.GetWeatherFunctionContract.Name, functions.GetWeatherWrapper } // GetWeatherWrapper is a wrapper function for GetWeather, which is also auto-generated - }); - - openAIChatAgent = openAIChatAgent.RegisterStreamingMiddleware(functionCallMiddleware); - #endregion create_function_call_middleware - - #region chat_agent_send_function_call - var reply = await openAIChatAgent.SendAsync("what is the weather in Seattle?"); - reply.GetContent().Should().Be("The weather in Seattle is sunny."); - reply.GetToolCalls().Count.Should().Be(1); - reply.GetToolCalls().First().Should().Be(this.GetWeatherFunctionContract.Name); - #endregion chat_agent_send_function_call - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs deleted file mode 100644 index bf9e066ade..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// PrintMessageMiddlewareCodeSnippet.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure; -using Azure.AI.OpenAI; - -namespace AutoGen.BasicSample.CodeSnippet; - -internal class PrintMessageMiddlewareCodeSnippet -{ - public async Task PrintMessageMiddlewareAsync() - { - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endpoint = new Uri(config.Endpoint); - var openaiClient = new OpenAIClient(endpoint, new AzureKeyCredential(config.ApiKey)); - var agent = new OpenAIChatAgent(openaiClient, "assistant", config.DeploymentName) - .RegisterMessageConnector(); - - #region PrintMessageMiddleware - var agentWithPrintMessageMiddleware = agent - .RegisterPrintMessage(); - - await agentWithPrintMessageMiddleware.SendAsync("write a long poem"); - #endregion PrintMessageMiddleware - } - - public async Task PrintMessageStreamingMiddlewareAsync() - { - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endpoint = new Uri(config.Endpoint); - var openaiClient = new OpenAIClient(endpoint, new AzureKeyCredential(config.ApiKey)); - - #region print_message_streaming - var streamingAgent = new OpenAIChatAgent(openaiClient, "assistant", config.DeploymentName) - .RegisterMessageConnector() - .RegisterPrintMessage(); - - await streamingAgent.SendAsync("write a long poem"); - #endregion print_message_streaming - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs deleted file mode 100644 index eab3736df8..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs +++ /dev/null @@ -1,86 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// RunCodeSnippetCodeSnippet.cs - -#region code_snippet_0_1 -using AutoGen.Core; -using AutoGen.DotnetInteractive; -using AutoGen.DotnetInteractive.Extension; -#endregion code_snippet_0_1 - -namespace AutoGen.BasicSample.CodeSnippet; -public class RunCodeSnippetCodeSnippet -{ - public async Task CodeSnippet1() - { - IAgent agent = new DefaultReplyAgent("agent", "Hello World"); - - #region code_snippet_1_1 - var kernel = DotnetInteractiveKernelBuilder - .CreateDefaultInProcessKernelBuilder() // add C# and F# kernels - .Build(); - #endregion code_snippet_1_1 - - #region code_snippet_1_2 - // register middleware to execute code block - var dotnetCodeAgent = agent - .RegisterMiddleware(async (msgs, option, innerAgent, ct) => - { - var lastMessage = msgs.LastOrDefault(); - if (lastMessage == null || lastMessage.GetContent() is null) - { - return await innerAgent.GenerateReplyAsync(msgs, option, ct); - } - - if (lastMessage.ExtractCodeBlock("```csharp", "```") is string codeSnippet) - { - // execute code snippet - var result = await kernel.RunSubmitCodeCommandAsync(codeSnippet, "csharp"); - return new TextMessage(Role.Assistant, result, from: agent.Name); - } - else - { - // no code block found, invoke next agent - return await innerAgent.GenerateReplyAsync(msgs, option, ct); - } - }); - - var codeSnippet = @" - ```csharp - Console.WriteLine(""Hello World""); - ```"; - - await dotnetCodeAgent.SendAsync(codeSnippet); - // output: Hello World - #endregion code_snippet_1_2 - - #region code_snippet_1_3 - var content = @" - ```csharp - // This is csharp code snippet - ``` - - ```python - // This is python code snippet - ``` - "; - #endregion code_snippet_1_3 - - #region code_snippet_1_4 - var pythonKernel = DotnetInteractiveKernelBuilder - .CreateDefaultInProcessKernelBuilder() - .AddPythonKernel(venv: "python3") - .Build(); - - var pythonCode = """ - print('Hello from Python!') - """; - var result = await pythonKernel.RunSubmitCodeCommandAsync(pythonCode, "python3"); - #endregion code_snippet_1_4 - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs deleted file mode 100644 index e5cd926903..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs +++ /dev/null @@ -1,107 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SemanticKernelCodeSnippet.cs - -using AutoGen.Core; -using AutoGen.SemanticKernel; -using AutoGen.SemanticKernel.Extension; -using FluentAssertions; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace AutoGen.BasicSample.CodeSnippet; - -public class SemanticKernelCodeSnippet -{ - public async Task GetWeather(string location) - { - return "The weather in " + location + " is sunny."; - } - public async Task CreateSemanticKernelAgentAsync() - { - #region create_semantic_kernel_agent - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var builder = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey); - var kernel = builder.Build(); - - // create a semantic kernel agent - var semanticKernelAgent = new SemanticKernelAgent( - kernel: kernel, - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks."); - - // SemanticKernelAgent supports the following message types: - // - IMessage where ChatMessageContent is from Azure.AI.OpenAI - - var helloMessage = new ChatMessageContent(AuthorRole.User, "Hello"); - - // Use MessageEnvelope.Create to create an IMessage - var chatMessageContent = MessageEnvelope.Create(helloMessage); - var reply = await semanticKernelAgent.SendAsync(chatMessageContent); - - // The type of reply is MessageEnvelope where ChatResponseMessage is from Azure.AI.OpenAI - reply.Should().BeOfType>(); - - // You can un-envelop the reply to get the ChatResponseMessage - ChatMessageContent response = reply.As>().Content; - response.Role.Should().Be(AuthorRole.Assistant); - #endregion create_semantic_kernel_agent - - #region create_semantic_kernel_agent_streaming - var streamingReply = semanticKernelAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); - - await foreach (var streamingMessage in streamingReply) - { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().From.Should().Be("assistant"); - } - #endregion create_semantic_kernel_agent_streaming - } - - public async Task SemanticKernelChatMessageContentConnector() - { - #region register_semantic_kernel_chat_message_content_connector - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var builder = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey); - var kernel = builder.Build(); - - // create a semantic kernel agent - var semanticKernelAgent = new SemanticKernelAgent( - kernel: kernel, - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks."); - - // Register the connector middleware to the kernel agent - var semanticKernelAgentWithConnector = semanticKernelAgent - .RegisterMessageConnector(); - - // now semanticKernelAgentWithConnector supports more message types - IMessage[] messages = [ - MessageEnvelope.Create(new ChatMessageContent(AuthorRole.User, "Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - ]; - - foreach (var message in messages) - { - var reply = await semanticKernelAgentWithConnector.SendAsync(message); - - // SemanticKernelChatMessageContentConnector will convert the reply message to TextMessage - reply.Should().BeOfType(); - } - #endregion register_semantic_kernel_chat_message_content_connector - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs deleted file mode 100644 index f2814394db..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs +++ /dev/null @@ -1,127 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., 
https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TypeSafeFunctionCallCodeSnippet.cs - -using System.Text.Json; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#region weather_report_using_statement -using AutoGen.Core; -#endregion weather_report_using_statement - -#region weather_report -public partial class TypeSafeFunctionCall -{ - /// - /// Get weather report - /// - /// city - /// date - [Function] - public async Task WeatherReport(string city, string date) - { - return $"Weather report for {city} on {date} is sunny"; - } -} -#endregion weather_report - -public partial class TypeSafeFunctionCall -{ - public async Task Consume() - { - #region weather_report_consume - var functionInstance = new TypeSafeFunctionCall(); - - // Get the generated function definition - FunctionDefinition functionDefiniton = functionInstance.WeatherReportFunctionContract.ToOpenAIFunctionDefinition(); - - // Get the generated function wrapper - Func> functionWrapper = functionInstance.WeatherReportWrapper; - - // ... - #endregion weather_report_consume - } -} -#region code_snippet_3 -// file: FunctionCall.cs - -public partial class TypeSafeFunctionCall -{ - /// - /// convert input to upper case - /// - /// input - [Function] - public async Task UpperCase(string input) - { - var result = input.ToUpper(); - return result; - } -} -#endregion code_snippet_3 - -public class TypeSafeFunctionCallCodeSnippet -{ - public async Task UpperCase(string input) - { - var result = input.ToUpper(); - return result; - } - - #region code_snippet_1 - // file: FunctionDefinition.generated.cs - public FunctionDefinition UpperCaseFunction - { - get => new FunctionDefinition - { - Name = @"UpperCase", - Description = "convert input to upper case", - Parameters = BinaryData.FromObjectAsJson(new - { - Type = "object", - Properties = new - { - input = new - { - Type = @"string", - Description = @"input", - }, - }, - Required = new[] - { - "input", - }, - }, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }) - }; - } - #endregion code_snippet_1 - - #region code_snippet_2 - // file: FunctionDefinition.generated.cs - private class UpperCaseSchema - { - public string input { get; set; } - } - - public Task UpperCaseWrapper(string arguments) - { - var schema = JsonSerializer.Deserialize( - arguments, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }); - - return UpperCase(schema.input); - } - #endregion code_snippet_2 -} diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/UserProxyAgentCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/UserProxyAgentCodeSnippet.cs deleted file mode 100644 index 7fcbc21ca3..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/UserProxyAgentCodeSnippet.cs +++ /dev/null @@ -1,26 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// UserProxyAgentCodeSnippet.cs -using AutoGen.Core; - -namespace AutoGen.BasicSample.CodeSnippet; - -public class UserProxyAgentCodeSnippet -{ - public async Task CodeSnippet1() - { - #region code_snippet_1 - // create a user proxy agent which always ask user for input - var agent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS); - - await agent.SendAsync("hello"); - #endregion code_snippet_1 - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs deleted file mode 100644 index 79c055a1c7..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs +++ /dev/null @@ -1,52 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example01_AssistantAgent.cs - -using AutoGen; -using AutoGen.BasicSample; -using AutoGen.Core; -using FluentAssertions; - -/// -/// This example shows the basic usage of class. -/// -public static class Example01_AssistantAgent -{ - public static async Task RunAsync() - { - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var config = new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }; - - // create assistant agent - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You convert what user said to all uppercase.", - llmConfig: config) - .RegisterPrintMessage(); - - // talk to the assistant agent - var reply = await assistantAgent.SendAsync("hello world"); - reply.Should().BeOfType(); - reply.GetContent().Should().Be("HELLO WORLD"); - - // to carry on the conversation, pass the previous conversation history to the next call - var conversationHistory = new List - { - new TextMessage(Role.User, "hello world"), // first message - reply, // reply from assistant agent - }; - - reply = await assistantAgent.SendAsync("hello world again", conversationHistory); - reply.Should().BeOfType(); - reply.GetContent().Should().Be("HELLO WORLD AGAIN"); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs b/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs deleted file mode 100644 index af629f758c..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs +++ /dev/null @@ -1,86 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Example02_TwoAgent_MathChat.cs - -using AutoGen; -using AutoGen.BasicSample; -using AutoGen.Core; -using FluentAssertions; -public static class Example02_TwoAgent_MathChat -{ - public static async Task RunAsync() - { - #region code_snippet_1 - // get gpt-3.5-turbo config - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - - // create teacher agent - // teacher agent will create math questions - var teacher = new AssistantAgent( - name: "teacher", - systemMessage: @"You are a teacher that create pre-school math question for student and check answer. - If the answer is correct, you stop the conversation by saying [COMPLETE]. - If the answer is wrong, you ask student to fix it.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }) - .RegisterMiddleware(async (msgs, option, agent, _) => - { - var reply = await agent.GenerateReplyAsync(msgs, option); - if (reply.GetContent()?.ToLower().Contains("complete") is true) - { - return new TextMessage(Role.Assistant, GroupChatExtension.TERMINATE, from: reply.From); - } - - return reply; - }) - .RegisterPrintMessage(); - - // create student agent - // student agent will answer the math questions - var student = new AssistantAgent( - name: "student", - systemMessage: "You are a student that answer question from teacher", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }) - .RegisterPrintMessage(); - - // start the conversation - var conversation = await student.InitiateChatAsync( - receiver: teacher, - message: "Hey teacher, please create math question for me.", - maxRound: 10); - - // output - // Message from teacher - // -------------------- - // content: Of course!Here's a math question for you: - // - // What is 2 + 3 ? - // -------------------- - // - // Message from student - // -------------------- - // content: The sum of 2 and 3 is 5. - // -------------------- - // - // Message from teacher - // -------------------- - // content: [GROUPCHAT_TERMINATE] - // -------------------- - #endregion code_snippet_1 - - conversation.Count().Should().BeLessThan(10); - conversation.Last().IsGroupChatTerminateMessage().Should().BeTrue(); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs b/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs deleted file mode 100644 index 7983554d66..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs +++ /dev/null @@ -1,112 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example03_Agent_FunctionCall.cs - -using AutoGen; -using AutoGen.BasicSample; -using AutoGen.Core; -using FluentAssertions; - -/// -/// This example shows how to add type-safe function call to an agent. -/// -public partial class Example03_Agent_FunctionCall -{ - /// - /// upper case the message when asked. - /// - /// - [Function] - public async Task UpperCase(string message) - { - return message.ToUpper(); - } - - /// - /// Concatenate strings. 
- /// - /// strings to concatenate - [Function] - public async Task ConcatString(string[] strings) - { - return string.Join(" ", strings); - } - - /// - /// calculate tax - /// - /// price, should be an integer - /// tax rate, should be in range (0, 1) - [FunctionAttribute] - public async Task CalculateTax(int price, float taxRate) - { - return $"tax is {price * taxRate}"; - } - - public static async Task RunAsync() - { - var instance = new Example03_Agent_FunctionCall(); - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - - // AutoGen makes use of AutoGen.SourceGenerator to automatically generate FunctionDefinition and FunctionCallWrapper for you. - // The FunctionDefinition will be created based on function signature and XML documentation. - // The return type of type-safe function needs to be Task. And to get the best performance, please try only use primitive types and arrays of primitive types as parameters. - var config = new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - FunctionContracts = new[] - { - instance.ConcatStringFunctionContract, - instance.UpperCaseFunctionContract, - instance.CalculateTaxFunctionContract, - }, - }; - - var agent = new AssistantAgent( - name: "agent", - systemMessage: "You are a helpful AI assistant", - llmConfig: config, - functionMap: new Dictionary>> - { - { nameof(ConcatString), instance.ConcatStringWrapper }, - { nameof(UpperCase), instance.UpperCaseWrapper }, - { nameof(CalculateTax), instance.CalculateTaxWrapper }, - }) - .RegisterPrintMessage(); - - // talk to the assistant agent - var upperCase = await agent.SendAsync("convert to upper case: hello world"); - upperCase.GetContent()?.Should().Be("HELLO WORLD"); - upperCase.Should().BeOfType(); - upperCase.GetToolCalls().Should().HaveCount(1); - upperCase.GetToolCalls().First().FunctionName.Should().Be(nameof(UpperCase)); - - var concatString = await agent.SendAsync("concatenate strings: a, b, c, d, e"); - concatString.GetContent()?.Should().Be("a b c d e"); - concatString.Should().BeOfType(); - concatString.GetToolCalls().Should().HaveCount(1); - concatString.GetToolCalls().First().FunctionName.Should().Be(nameof(ConcatString)); - - var calculateTax = await agent.SendAsync("calculate tax: 100, 0.1"); - calculateTax.GetContent().Should().Be("tax is 10"); - calculateTax.Should().BeOfType(); - calculateTax.GetToolCalls().Should().HaveCount(1); - calculateTax.GetToolCalls().First().FunctionName.Should().Be(nameof(CalculateTax)); - - // parallel function calls - var calculateTaxes = await agent.SendAsync("calculate tax: 100, 0.1; calculate tax: 200, 0.2"); - calculateTaxes.GetContent().Should().Be("tax is 10\ntax is 40"); // "tax is 10\n tax is 40 - calculateTaxes.Should().BeOfType(); - calculateTaxes.GetToolCalls().Should().HaveCount(2); - calculateTaxes.GetToolCalls().First().FunctionName.Should().Be(nameof(CalculateTax)); - - // send aggregate message back to llm to get the final result - var finalResult = await agent.SendAsync(calculateTaxes); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs b/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs deleted file mode 100644 index 4dd6ac5633..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs +++ /dev/null @@ -1,270 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, 
-// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example04_Dynamic_GroupChat_Coding_Task.cs - -using AutoGen; -using AutoGen.BasicSample; -using AutoGen.Core; -using AutoGen.DotnetInteractive; -using AutoGen.DotnetInteractive.Extension; -using AutoGen.OpenAI.V1; -using FluentAssertions; - -public partial class Example04_Dynamic_GroupChat_Coding_Task -{ - public static async Task RunAsync() - { - var instance = new Example04_Dynamic_GroupChat_Coding_Task(); - - var kernel = DotnetInteractiveKernelBuilder - .CreateDefaultInProcessKernelBuilder() - .AddPythonKernel("python3") - .Build(); - - var gptConfig = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - - var groupAdmin = new GPTAgent( - name: "groupAdmin", - systemMessage: "You are the admin of the group chat", - temperature: 0f, - config: gptConfig) - .RegisterPrintMessage(); - - var userProxy = new UserProxyAgent(name: "user", defaultReply: GroupChatExtension.TERMINATE, humanInputMode: HumanInputMode.NEVER) - .RegisterPrintMessage(); - - // Create admin agent - var admin = new AssistantAgent( - name: "admin", - systemMessage: """ - You are a manager who takes coding problem from user and resolve problem by splitting them into small tasks and assign each task to the most appropriate agent. - Here's available agents who you can assign task to: - - coder: write python code to resolve task - - runner: run python code from coder - - The workflow is as follows: - - You take the coding problem from user - - You break the problem into small tasks. For each tasks you first ask coder to write code to resolve the task. Once the code is written, you ask runner to run the code. - - Once a small task is resolved, you summarize the completed steps and create the next step. - - You repeat the above steps until the coding problem is resolved. - - You can use the following json format to assign task to agents: - ```task - { - "to": "{agent_name}", - "task": "{a short description of the task}", - "context": "{previous context from scratchpad}" - } - ``` - - If you need to ask user for extra information, you can use the following format: - ```ask - { - "question": "{question}" - } - ``` - - Once the coding problem is resolved, summarize each steps and results and send the summary to the user using the following format: - ```summary - @user, - ``` - - Your reply must contain one of [task|ask|summary] to indicate the type of your message. - """, - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gptConfig], - }) - .RegisterPrintMessage(); - - // create coder agent - // The coder agent is a composite agent that contains dotnet coder, code reviewer and nuget agent. - // The dotnet coder write dotnet code to resolve the task. - // The code reviewer review the code block from coder's reply. - // The nuget agent install nuget packages if there's any. - var coderAgent = new GPTAgent( - name: "coder", - systemMessage: @"You act as python coder, you write python code to resolve task. Once you finish writing code, ask runner to run the code for you. - -Here're some rules to follow on writing dotnet code: -- put code between ```python and ``` -- Try avoid using external library -- Always print out the result to console. Don't write code that doesn't print out anything. 
- -Use the following format to install pip package: -```python -%pip install -``` - -If your code is incorrect, Fix the error and send the code again. - -Here's some externel information -- The link to mlnet repo is: https://github.com/dotnet/machinelearning. you don't need a token to use github pr api. Make sure to include a User-Agent header, otherwise github will reject it. -", - config: gptConfig, - temperature: 0.4f) - .RegisterPrintMessage(); - - // code reviewer agent will review if code block from coder's reply satisfy the following conditions: - // - There's only one code block - // - The code block is csharp code block - // - The code block is top level statement - // - The code block is not using declaration - var codeReviewAgent = new GPTAgent( - name: "reviewer", - systemMessage: """ - You are a code reviewer who reviews code from coder. You need to check if the code satisfy the following conditions: - - The reply from coder contains at least one code block, e.g ```python and ``` - - There's only one code block and it's python code block - - You don't check the code style, only check if the code satisfy the above conditions. - - Put your comment between ```review and ```, if the code satisfies all conditions, put APPROVED in review.result field. Otherwise, put REJECTED along with comments. make sure your comment is clear and easy to understand. - - ## Example 1 ## - ```review - comment: The code satisfies all conditions. - result: APPROVED - ``` - - ## Example 2 ## - ```review - comment: The code is inside main function. Please rewrite the code in top level statement. - result: REJECTED - ``` - - """, - config: gptConfig, - temperature: 0f) - .RegisterPrintMessage(); - - // create runner agent - // The runner agent will run the code block from coder's reply. - // It runs dotnet code using dotnet interactive service hook. - // It also truncate the output if the output is too long. - var runner = new DefaultReplyAgent( - name: "runner", - defaultReply: "No code available, coder, write code please") - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var mostRecentCoderMessage = msgs.LastOrDefault(x => x.From == "coder") ?? 
throw new Exception("No coder message found"); - - if (mostRecentCoderMessage.ExtractCodeBlock("```python", "```") is string code) - { - var result = await kernel.RunSubmitCodeCommandAsync(code, "python"); - // only keep the first 500 characters - if (result.Length > 500) - { - result = result.Substring(0, 500); - } - result = $""" - # [CODE_BLOCK_EXECUTION_RESULT] - {result} - """; - - return new TextMessage(Role.Assistant, result, from: agent.Name); - } - else - { - return await agent.GenerateReplyAsync(msgs, option, ct); - } - }) - .RegisterPrintMessage(); - - var adminToCoderTransition = Transition.Create(admin, coderAgent, async (from, to, messages) => - { - // the last message should be from admin - var lastMessage = messages.Last(); - if (lastMessage.From != admin.Name) - { - return false; - } - - return true; - }); - var coderToReviewerTransition = Transition.Create(coderAgent, codeReviewAgent); - var adminToRunnerTransition = Transition.Create(admin, runner, async (from, to, messages) => - { - // the last message should be from admin - var lastMessage = messages.Last(); - if (lastMessage.From != admin.Name) - { - return false; - } - - // the previous messages should contain a message from coder - var coderMessage = messages.FirstOrDefault(x => x.From == coderAgent.Name); - if (coderMessage is null) - { - return false; - } - - return true; - }); - - var runnerToAdminTransition = Transition.Create(runner, admin); - - var reviewerToAdminTransition = Transition.Create(codeReviewAgent, admin); - - var adminToUserTransition = Transition.Create(admin, userProxy, async (from, to, messages) => - { - // the last message should be from admin - var lastMessage = messages.Last(); - if (lastMessage.From != admin.Name) - { - return false; - } - - return true; - }); - - var userToAdminTransition = Transition.Create(userProxy, admin); - - var workflow = new Graph( - [ - adminToCoderTransition, - coderToReviewerTransition, - reviewerToAdminTransition, - adminToRunnerTransition, - runnerToAdminTransition, - adminToUserTransition, - userToAdminTransition, - ]); - - // create group chat - var groupChat = new GroupChat( - admin: groupAdmin, - members: [admin, coderAgent, runner, codeReviewAgent, userProxy], - workflow: workflow); - - // task 1: retrieve the most recent pr from mlnet and save it in result.txt - var task = """ - retrieve the most recent pr from mlnet and save it in result.txt - """; - var chatHistory = new List - { - new TextMessage(Role.Assistant, task) - { - From = userProxy.Name - } - }; - await foreach (var message in groupChat.SendAsync(chatHistory, maxRound: 10)) - { - if (message.From == admin.Name && message.GetContent().Contains("```summary")) - { - // Task complete! - break; - } - } - - // check if the result file is created - var result = "result.txt"; - File.Exists(result).Should().BeTrue(); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs b/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs deleted file mode 100644 index 4ab6a13988..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs +++ /dev/null @@ -1,140 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example05_Dalle_And_GPT4V.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using FluentAssertions; -using autogen = AutoGen.LLMConfigAPI; - -public partial class Example05_Dalle_And_GPT4V -{ - private readonly OpenAIClient openAIClient; - - public Example05_Dalle_And_GPT4V(OpenAIClient openAIClient) - { - this.openAIClient = openAIClient; - } - - /// - /// Generate image from prompt using DALL-E. - /// - /// prompt with feedback - /// - [Function] - public async Task GenerateImage(string prompt) - { - // TODO - // generate image from prompt using DALL-E - // and return url. - var option = new ImageGenerationOptions - { - Size = ImageSize.Size1024x1024, - Style = ImageGenerationStyle.Vivid, - ImageCount = 1, - Prompt = prompt, - Quality = ImageGenerationQuality.Standard, - DeploymentName = "dall-e-3", - }; - - var imageResponse = await openAIClient.GetImageGenerationsAsync(option); - var imageUrl = imageResponse.Value.Data.First().Url.OriginalString; - - return $@"// ignore this line [IMAGE_GENERATION] -The image is generated from prompt {prompt} - -{imageUrl}"; - } - - public static async Task RunAsync() - { - // This example shows how to use DALL-E and GPT-4V to generate image from prompt and feedback. - // The DALL-E agent will generate image from prompt. - // The GPT-4V agent will provide feedback to DALL-E agent to help it generate better image. - // The conversation will be terminated when the image satisfies the condition. - // The image will be saved to image.jpg in current directory. - - // get OpenAI Key and create config - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var gpt35Config = autogen.GetOpenAIConfigList(openAIKey, new[] { "gpt-3.5-turbo" }); - var gpt4vConfig = autogen.GetOpenAIConfigList(openAIKey, new[] { "gpt-4-vision-preview" }); - var openAIClient = new OpenAIClient(openAIKey); - var instance = new Example05_Dalle_And_GPT4V(openAIClient); - var imagePath = Path.Combine("resource", "images", "background.png"); - if (File.Exists(imagePath)) - { - File.Delete(imagePath); - } - - var generateImageFunctionMiddleware = new FunctionCallMiddleware( - functions: [instance.GenerateImageFunctionContract], - functionMap: new Dictionary>> - { - { nameof(GenerateImage), instance.GenerateImageWrapper }, - }); - var dalleAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - modelName: "gpt-3.5-turbo", - name: "dalle", - systemMessage: "You are a DALL-E agent that generate image from prompt, when conversation is terminated, return the most recent image url") - .RegisterMessageConnector() - .RegisterStreamingMiddleware(generateImageFunctionMiddleware) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - if (msgs.Any(msg => msg.GetContent()?.ToLower().Contains("approve") is true)) - { - return new TextMessage(Role.Assistant, $"The image satisfies the condition, conversation is terminated. 
{GroupChatExtension.TERMINATE}"); - } - - var msgsWithoutImage = msgs.Where(msg => msg is not ImageMessage).ToList(); - var reply = await agent.GenerateReplyAsync(msgsWithoutImage, option, ct); - - if (reply.GetContent() is string content && content.Contains("IMAGE_GENERATION")) - { - var imageUrl = content.Split("\n").Last(); - var imageMessage = new ImageMessage(Role.Assistant, imageUrl, from: reply.From, mimeType: "image/png"); - - Console.WriteLine($"download image from {imageUrl} to {imagePath}"); - var httpClient = new HttpClient(); - var imageBytes = await httpClient.GetByteArrayAsync(imageUrl, ct); - File.WriteAllBytes(imagePath, imageBytes); - - return imageMessage; - } - else - { - return reply; - } - }) - .RegisterPrintMessage(); - - var gpt4VAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "gpt4v", - modelName: "gpt-4-vision-preview", - systemMessage: @"You are a critism that provide feedback to DALL-E agent. -Carefully check the image generated by DALL-E agent and provide feedback. -If the image satisfies the condition, then say [APPROVE]. -Otherwise, provide detailed feedback to DALL-E agent so it can generate better image. - -The image should satisfy the following conditions: -- There should be a cat and a mouse in the image -- The cat should be chasing after the mouse") - .RegisterMessageConnector() - .RegisterPrintMessage(); - - await gpt4VAgent.InitiateChatAsync( - receiver: dalleAgent, - message: "Hey dalle, please generate image from prompt: English short hair blue cat chase after a mouse", - maxRound: 10); - - File.Exists(imagePath).Should().BeTrue(); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs deleted file mode 100644 index d3d51b7c42..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs +++ /dev/null @@ -1,38 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Example06_UserProxyAgent.cs -using AutoGen.Core; -using AutoGen.OpenAI.V1; - -namespace AutoGen.BasicSample; - -public static class Example06_UserProxyAgent -{ - public static async Task RunAsync() - { - var gpt35 = LLMConfiguration.GetOpenAIGPT3_5_Turbo(); - - var assistantAgent = new GPTAgent( - name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - config: gpt35) - .RegisterPrintMessage(); - - // set human input mode to ALWAYS so that user always provide input - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS) - .RegisterPrintMessage(); - - // start the conversation - await userProxyAgent.InitiateChatAsync( - receiver: assistantAgent, - message: "Hey assistant, please help me to do some tasks.", - maxRound: 10); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs b/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs deleted file mode 100644 index 6cf0c50db2..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs +++ /dev/null @@ -1,389 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs - -using System.Text; -using System.Text.Json; -using AutoGen.BasicSample; -using AutoGen.Core; -using AutoGen.DotnetInteractive; -using AutoGen.DotnetInteractive.Extension; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Microsoft.DotNet.Interactive; - -public partial class Example07_Dynamic_GroupChat_Calculate_Fibonacci -{ - #region reviewer_function - public struct CodeReviewResult - { - public bool HasMultipleCodeBlocks { get; set; } - public bool IsTopLevelStatement { get; set; } - public bool IsDotnetCodeBlock { get; set; } - public bool IsPrintResultToConsole { get; set; } - } - - /// - /// review code block - /// - /// true if there're multipe csharp code blocks - /// true if the code is in top level statement - /// true if the code block is csharp code block - /// true if the code block print out result to console - [Function] - public async Task ReviewCodeBlock( - bool hasMultipleCodeBlocks, - bool isTopLevelStatement, - bool isDotnetCodeBlock, - bool isPrintResultToConsole) - { - var obj = new CodeReviewResult - { - HasMultipleCodeBlocks = hasMultipleCodeBlocks, - IsTopLevelStatement = isTopLevelStatement, - IsDotnetCodeBlock = isDotnetCodeBlock, - IsPrintResultToConsole = isPrintResultToConsole, - }; - - return JsonSerializer.Serialize(obj); - } - #endregion reviewer_function - - #region create_coder - public static async Task CreateCoderAgentAsync(OpenAIClient client, string deployModel) - { - var coder = new OpenAIChatAgent( - openAIClient: client, - modelName: deployModel, - name: "coder", - systemMessage: @"You act as dotnet coder, you write dotnet code to resolve task. Once you finish writing code, ask runner to run the code for you. 
- - Here're some rules to follow on writing dotnet code: - - put code between ```csharp and ``` - - Avoid adding `using` keyword when creating disposable object. e.g `var httpClient = new HttpClient()` - - Try to use `var` instead of explicit type. - - Try avoid using external library, use .NET Core library instead. - - Use top level statement to write code. - - Always print out the result to console. Don't write code that doesn't print out anything. - - If you need to install nuget packages, put nuget packages in the following format: - ```nuget - nuget_package_name - ``` - - If your code is incorrect, runner will tell you the error message. Fix the error and send the code again.", - temperature: 0.4f) - .RegisterMessageConnector() - .RegisterPrintMessage(); - - return coder; - } - #endregion create_coder - - #region create_runner - public static async Task CreateRunnerAgentAsync(Kernel kernel) - { - var runner = new DefaultReplyAgent( - name: "runner", - defaultReply: "No code available.") - .RegisterMiddleware(async (msgs, option, agent, _) => - { - if (msgs.Count() == 0 || msgs.All(msg => msg.From != "coder")) - { - return new TextMessage(Role.Assistant, "No code available. Coder please write code"); - } - else - { - var coderMsg = msgs.Last(msg => msg.From == "coder"); - if (coderMsg.ExtractCodeBlock("```csharp", "```") is string code) - { - var codeResult = await kernel.RunSubmitCodeCommandAsync(code, "csharp"); - - codeResult = $""" - [RUNNER_RESULT] - {codeResult} - """; - - return new TextMessage(Role.Assistant, codeResult) - { - From = "runner", - }; - } - else - { - return new TextMessage(Role.Assistant, "No code available. Coder please write code"); - } - } - }) - .RegisterPrintMessage(); - - return runner; - } - #endregion create_runner - - #region create_admin - public static async Task CreateAdminAsync(OpenAIClient client, string deployModel) - { - var admin = new OpenAIChatAgent( - openAIClient: client, - modelName: deployModel, - name: "admin", - temperature: 0) - .RegisterMessageConnector() - .RegisterPrintMessage(); - - return admin; - } - #endregion create_admin - - #region create_reviewer - public static async Task CreateReviewerAgentAsync(OpenAIClient openAIClient, string deployModel) - { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var functions = new Example07_Dynamic_GroupChat_Calculate_Fibonacci(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [functions.ReviewCodeBlockFunctionContract], - functionMap: new Dictionary>>() - { - { nameof(functions.ReviewCodeBlock), functions.ReviewCodeBlockWrapper }, - }); - var reviewer = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "code_reviewer", - systemMessage: @"You review code block from coder", - modelName: deployModel) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware) - .RegisterMiddleware(async (msgs, option, innerAgent, ct) => - { - var maxRetry = 3; - var reply = await innerAgent.GenerateReplyAsync(msgs, option, ct); - while (maxRetry-- > 0) - { - if (reply.GetToolCalls() is var toolCalls && toolCalls.Count() == 1 && toolCalls[0].FunctionName == nameof(ReviewCodeBlock)) - { - var toolCallResult = reply.GetContent(); - var reviewResultObj = JsonSerializer.Deserialize(toolCallResult); - var reviews = new List(); - if (reviewResultObj.HasMultipleCodeBlocks) - { - var fixCodeBlockPrompt = @"There're multiple code blocks, please combine them into one code block"; - reviews.Add(fixCodeBlockPrompt); - } - - if 
(reviewResultObj.IsDotnetCodeBlock is false) - { - var fixCodeBlockPrompt = @"The code block is not csharp code block, please write dotnet code only"; - reviews.Add(fixCodeBlockPrompt); - } - - if (reviewResultObj.IsTopLevelStatement is false) - { - var fixCodeBlockPrompt = @"The code is not top level statement, please rewrite your dotnet code using top level statement"; - reviews.Add(fixCodeBlockPrompt); - } - - if (reviewResultObj.IsPrintResultToConsole is false) - { - var fixCodeBlockPrompt = @"The code doesn't print out result to console, please print out result to console"; - reviews.Add(fixCodeBlockPrompt); - } - - if (reviews.Count > 0) - { - var sb = new StringBuilder(); - sb.AppendLine("There're some comments from code reviewer, please fix these comments"); - foreach (var review in reviews) - { - sb.AppendLine($"- {review}"); - } - - return new TextMessage(Role.Assistant, sb.ToString(), from: "code_reviewer"); - } - else - { - var msg = new TextMessage(Role.Assistant, "The code looks good, please ask runner to run the code for you.") - { - From = "code_reviewer", - }; - - return msg; - } - } - else - { - var originalContent = reply.GetContent(); - var prompt = $@"Please convert the content to ReviewCodeBlock function arguments. - - ## Original Content - {originalContent}"; - - reply = await innerAgent.SendAsync(prompt, msgs, ct); - } - } - - throw new Exception("Failed to review code block"); - }) - .RegisterPrintMessage(); - - return reviewer; - } - #endregion create_reviewer - - public static async Task RunWorkflowAsync() - { - long the39thFibonacciNumber = 63245986; - var kernel = DotnetInteractiveKernelBuilder - .CreateDefaultInProcessKernelBuilder() - .Build(); - - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var openaiClient = new OpenAIClient(new Uri(config.Endpoint), new Azure.AzureKeyCredential(config.ApiKey)); - - #region create_workflow - var reviewer = await CreateReviewerAgentAsync(openaiClient, config.DeploymentName); - var coder = await CreateCoderAgentAsync(openaiClient, config.DeploymentName); - var runner = await CreateRunnerAgentAsync(kernel); - var admin = await CreateAdminAsync(openaiClient, config.DeploymentName); - - var admin2CoderTransition = Transition.Create(admin, coder); - var coder2ReviewerTransition = Transition.Create(coder, reviewer); - var reviewer2RunnerTransition = Transition.Create( - from: reviewer, - to: runner, - canTransitionAsync: async (from, to, messages) => - { - var lastMessage = messages.Last(); - if (lastMessage is TextMessage textMessage && textMessage.Content.ToLower().Contains("the code looks good, please ask runner to run the code for you.") is true) - { - // ask runner to run the code - return true; - } - - return false; - }); - var reviewer2CoderTransition = Transition.Create( - from: reviewer, - to: coder, - canTransitionAsync: async (from, to, messages) => - { - var lastMessage = messages.Last(); - if (lastMessage is TextMessage textMessage && textMessage.Content.ToLower().Contains("there're some comments from code reviewer, please fix these comments") is true) - { - // ask coder to fix the code based on reviewer's comments - return true; - } - - return false; - }); - - var runner2CoderTransition = Transition.Create( - from: runner, - to: coder, - canTransitionAsync: async (from, to, messages) => - { - var lastMessage = messages.Last(); - if (lastMessage is TextMessage textMessage && textMessage.Content.ToLower().Contains("error") is true) - { - // ask coder to fix the error - return true; - } - - return 
false; - }); - var runner2AdminTransition = Transition.Create(runner, admin); - - var workflow = new Graph( - [ - admin2CoderTransition, - coder2ReviewerTransition, - reviewer2RunnerTransition, - reviewer2CoderTransition, - runner2CoderTransition, - runner2AdminTransition, - ]); - #endregion create_workflow - - #region create_group_chat_with_workflow - var groupChat = new GroupChat( - admin: admin, - workflow: workflow, - members: - [ - admin, - coder, - runner, - reviewer, - ]); - #endregion create_group_chat_with_workflow - admin.SendIntroduction("Welcome to my group, work together to resolve my task", groupChat); - coder.SendIntroduction("I will write dotnet code to resolve task", groupChat); - reviewer.SendIntroduction("I will review dotnet code", groupChat); - runner.SendIntroduction("I will run dotnet code once the review is done", groupChat); - var task = "What's the 39th of fibonacci number?"; - - var taskMessage = new TextMessage(Role.User, task, from: admin.Name); - await foreach (var message in groupChat.SendAsync([taskMessage], maxRound: 10)) - { - // teminate chat if message is from runner and run successfully - if (message.From == "runner" && message.GetContent().Contains(the39thFibonacciNumber.ToString())) - { - Console.WriteLine($"The 39th of fibonacci number is {the39thFibonacciNumber}"); - break; - } - } - } - - public static async Task RunAsync() - { - long the39thFibonacciNumber = 63245986; - var workDir = Path.Combine(Path.GetTempPath(), "InteractiveService"); - if (!Directory.Exists(workDir)) - { - Directory.CreateDirectory(workDir); - } - - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var openaiClient = new OpenAIClient(new Uri(config.Endpoint), new Azure.AzureKeyCredential(config.ApiKey)); - - var kernel = DotnetInteractiveKernelBuilder - .CreateDefaultInProcessKernelBuilder() - .Build(); - #region create_group_chat - var reviewer = await CreateReviewerAgentAsync(openaiClient, config.DeploymentName); - var coder = await CreateCoderAgentAsync(openaiClient, config.DeploymentName); - var runner = await CreateRunnerAgentAsync(kernel); - var admin = await CreateAdminAsync(openaiClient, config.DeploymentName); - var groupChat = new GroupChat( - admin: admin, - members: - [ - coder, - runner, - reviewer, - ]); - - coder.SendIntroduction("I will write dotnet code to resolve task", groupChat); - reviewer.SendIntroduction("I will review dotnet code", groupChat); - runner.SendIntroduction("I will run dotnet code once the review is done", groupChat); - - var task = "What's the 39th of fibonacci number?"; - var taskMessage = new TextMessage(Role.User, task); - await foreach (var message in groupChat.SendAsync([taskMessage], maxRound: 10)) - { - // teminate chat if message is from runner and run successfully - if (message.From == "runner" && message.GetContent().Contains(the39thFibonacciNumber.ToString())) - { - Console.WriteLine($"The 39th of fibonacci number is {the39thFibonacciNumber}"); - break; - } - } - #endregion create_group_chat - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs b/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs deleted file mode 100644 index 7ee01f802c..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example08_LMStudio.cs - -#region lmstudio_using_statements -using AutoGen.Core; -using AutoGen.LMStudio; -#endregion lmstudio_using_statements - -namespace AutoGen.BasicSample; - -public class Example08_LMStudio -{ - public static async Task RunAsync() - { - #region lmstudio_example_1 - var config = new LMStudioConfig("localhost", 1234); - var lmAgent = new LMStudioAgent("asssistant", config: config) - .RegisterPrintMessage(); - - await lmAgent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); - - // output from assistant (the output below is generated using llama-2-chat-7b, the output may vary depending on the model used) - // - // Of course! To calculate the 100th number in the Fibonacci sequence using C#, you can use the following code:``` - // using System; - // class FibonacciSequence { - // static int Fibonacci(int n) { - // if (n <= 1) { - // return 1; - // } else { - // return Fibonacci(n - 1) + Fibonacci(n - 2); - // } - // } - // static void Main() { - // Console.WriteLine("The 100th number in the Fibonacci sequence is: " + Fibonacci(100)); - // } - // } - // ``` - // In this code, we define a function `Fibonacci` that takes an integer `n` as input and returns the `n`-th number in the Fibonacci sequence. The function uses a recursive approach to calculate the value of the sequence. - // The `Main` method simply calls the `Fibonacci` function with the argument `100`, and prints the result to the console. - // Note that this code will only work for positive integers `n`. If you want to calculate the Fibonacci sequence for other types of numbers, such as real or complex numbers, you will need to modify the code accordingly. - #endregion lmstudio_example_1 - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs b/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs deleted file mode 100644 index 88b71c54c8..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs +++ /dev/null @@ -1,143 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example09_LMStudio_FunctionCall.cs - -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Core; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.BasicSample; - -public class LLaMAFunctionCall -{ - [JsonPropertyName("name")] - public string Name { get; set; } - - [JsonPropertyName("arguments")] - public JsonElement Arguments { get; set; } -} - -public partial class Example09_LMStudio_FunctionCall -{ - /// - /// Get weather from location. - /// - /// location - /// date. type is string - [Function] - public async Task GetWeather(string location, string date) - { - return $"[Function] The weather on {date} in {location} is sunny."; - } - - - /// - /// Search query on Google and return the results. 
- /// - /// search query - [Function] - public async Task GoogleSearch(string query) - { - return $"[Function] Here are the search results for {query}."; - } - - private static object SerializeFunctionDefinition(FunctionDefinition functionDefinition) - { - return new - { - type = "function", - function = new - { - name = functionDefinition.Name, - description = functionDefinition.Description, - parameters = functionDefinition.Parameters.ToObjectFromJson(), - } - }; - } - - public static async Task RunAsync() - { - #region lmstudio_function_call_example - // This example has been verified to work with Trelis-Llama-2-7b-chat-hf-function-calling-v3 - var instance = new Example09_LMStudio_FunctionCall(); - var config = new LMStudioConfig("localhost", 1234); - var systemMessage = @$"You are a helpful AI assistant."; - - // Because the LM studio server doesn't support openai function call yet - // To simulate the function call, we can put the function call details in the system message - // And ask agent to response in function call object format using few-shot example - object[] functionList = - [ - SerializeFunctionDefinition(instance.GetWeatherFunctionContract.ToOpenAIFunctionDefinition()), - SerializeFunctionDefinition(instance.GetWeatherFunctionContract.ToOpenAIFunctionDefinition()) - ]; - var functionListString = JsonSerializer.Serialize(functionList, new JsonSerializerOptions { WriteIndented = true }); - var lmAgent = new LMStudioAgent( - name: "assistant", - systemMessage: @$" -You are a helpful AI assistant -You have access to the following functions. Use them if required: - -{functionListString}", - config: config) - .RegisterMiddleware(async (msgs, option, innerAgent, ct) => - { - // inject few-shot example to the message - var exampleGetWeather = new TextMessage(Role.User, "Get weather in London"); - var exampleAnswer = new TextMessage(Role.Assistant, "{\n \"name\": \"GetWeather\",\n \"arguments\": {\n \"city\": \"London\"\n }\n}", from: innerAgent.Name); - - msgs = new[] { exampleGetWeather, exampleAnswer }.Concat(msgs).ToArray(); - var reply = await innerAgent.GenerateReplyAsync(msgs, option, ct); - - // if reply is a function call, invoke function - var content = reply.GetContent(); - try - { - if (JsonSerializer.Deserialize(content) is { } functionCall) - { - var arguments = JsonSerializer.Serialize(functionCall.Arguments); - // invoke function wrapper - if (functionCall.Name == instance.GetWeatherFunctionContract.Name) - { - var result = await instance.GetWeatherWrapper(arguments); - return new TextMessage(Role.Assistant, result); - } - else if (functionCall.Name == instance.GetWeatherFunctionContract.Name) - { - var result = await instance.GoogleSearchWrapper(arguments); - return new TextMessage(Role.Assistant, result); - } - else - { - throw new Exception($"Unknown function call: {functionCall.Name}"); - } - } - } - catch (JsonException) - { - // ignore - } - - return reply; - }) - .RegisterPrintMessage(); - - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS); - - await userProxyAgent.SendAsync( - receiver: lmAgent, - "Search the names of the five largest stocks in the US by market cap ") - .ToArrayAsync(); - #endregion lmstudio_function_call_example - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs b/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs deleted file mode 100644 index fe706037d2..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs +++ /dev/null 
@@ -1,86 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example10_SemanticKernel.cs - -using System.ComponentModel; -using AutoGen.Core; -using AutoGen.SemanticKernel.Extension; -using FluentAssertions; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -namespace AutoGen.BasicSample; - -public class LightPlugin -{ - public bool IsOn { get; set; } = false; - - [KernelFunction] - [Description("Gets the state of the light.")] - public string GetState() => this.IsOn ? "on" : "off"; - - [KernelFunction] - [Description("Changes the state of the light.'")] - public string ChangeState(bool newState) - { - this.IsOn = newState; - var state = this.GetState(); - - // Print the state to the console - Console.ForegroundColor = ConsoleColor.DarkBlue; - Console.WriteLine($"[Light is now {state}]"); - Console.ResetColor(); - - return state; - } -} - -public class Example10_SemanticKernel -{ - public static async Task RunAsync() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var builder = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey); - var kernel = builder.Build(); - var settings = new OpenAIPromptExecutionSettings - { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions, - }; - - kernel.Plugins.AddFromObject(new LightPlugin()); - var skAgent = kernel - .ToSemanticKernelAgent(name: "assistant", systemMessage: "You control the light", settings); - - // Send a message to the skAgent, the skAgent supports the following message types: - // - IMessage - // - (streaming) IMessage - // You can create an IMessage using MessageEnvelope.Create - var chatMessageContent = MessageEnvelope.Create(new ChatMessageContent(AuthorRole.User, "Toggle the light")); - var reply = await skAgent.SendAsync(chatMessageContent); - reply.Should().BeOfType>(); - Console.WriteLine((reply as IMessage).Content.Items[0].As().Text); - - var skAgentWithMiddleware = skAgent - .RegisterMessageConnector() // Register the message connector to support more AutoGen built-in message types - .RegisterPrintMessage(); - - // Now the skAgentWithMiddleware supports more IMessage types like TextMessage, ImageMessage or MultiModalMessage - // It also register a print format message hook to print the message in a human readable format to the console - await skAgent.SendAsync(chatMessageContent); - await skAgentWithMiddleware.SendAsync(new TextMessage(Role.User, "Toggle the light")); - - // The more message type an agent support, the more flexible it is to be used in different scenarios - // For example, since the TextMessage is supported, the skAgentWithMiddleware can be used with user proxy. 
- var userProxy = new UserProxyAgent("user"); - - await skAgentWithMiddleware.InitiateChatAsync(userProxy, "how can I help you today"); - } - -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs b/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs deleted file mode 100644 index 503cf76511..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs +++ /dev/null @@ -1,100 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example11_Sequential_GroupChat_Example.cs - -#region using_statement -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using AutoGen.SemanticKernel; -using AutoGen.SemanticKernel.Extension; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; -#endregion using_statement - -namespace AutoGen.BasicSample; - -public partial class Sequential_GroupChat_Example -{ - public static async Task CreateBingSearchAgentAsync() - { - #region CreateBingSearchAgent - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var apiKey = config.ApiKey; - var kernelBuilder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(config.DeploymentName, config.Endpoint, apiKey); - var bingApiKey = Environment.GetEnvironmentVariable("BING_API_KEY") ?? throw new Exception("BING_API_KEY environment variable is not set"); - var bingSearch = new BingConnector(bingApiKey); - var webSearchPlugin = new WebSearchEnginePlugin(bingSearch); - kernelBuilder.Plugins.AddFromObject(webSearchPlugin); - - var kernel = kernelBuilder.Build(); - var kernelAgent = new SemanticKernelAgent( - kernel: kernel, - name: "bing-search", - systemMessage: """ - You search results from Bing and return it as-is. - You put the original search result between ```bing and ``` - - e.g. 
- ```bing - xxx - ``` - """) - .RegisterMessageConnector() - .RegisterPrintMessage(); // pretty print the message - - return kernelAgent; - #endregion CreateBingSearchAgent - } - - public static async Task CreateSummarizerAgentAsync() - { - #region CreateSummarizerAgent - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var apiKey = config.ApiKey; - var endPoint = new Uri(config.Endpoint); - - var openAIClient = new OpenAIClient(endPoint, new Azure.AzureKeyCredential(apiKey)); - var openAIClientAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "summarizer", - modelName: config.DeploymentName, - systemMessage: "You summarize search result from bing in a short and concise manner"); - - return openAIClientAgent - .RegisterMessageConnector() - .RegisterPrintMessage(); // pretty print the message - #endregion CreateSummarizerAgent - } - - public static async Task RunAsync() - { - #region Sequential_GroupChat_Example - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS) - .RegisterPrintMessage(); - - var bingSearchAgent = await CreateBingSearchAgentAsync(); - var summarizerAgent = await CreateSummarizerAgentAsync(); - - var groupChat = new RoundRobinGroupChat( - agents: [userProxyAgent, bingSearchAgent, summarizerAgent]); - - var groupChatAgent = new GroupChatManager(groupChat); - - var history = await userProxyAgent.InitiateChatAsync( - receiver: groupChatAgent, - message: "How to deploy an openai resource on azure", - maxRound: 10); - #endregion Sequential_GroupChat_Example - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs b/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs deleted file mode 100644 index ef149ea7a0..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs +++ /dev/null @@ -1,194 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example12_TwoAgent_Fill_Application.cs - -using System.Text; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.BasicSample; - -public partial class TwoAgent_Fill_Application -{ - private string? name = null; - private string? email = null; - private string? phone = null; - private string? address = null; - private bool? receiveUpdates = null; - - [Function] - public async Task SaveProgress( - string name, - string email, - string phone, - string address, - bool? receiveUpdates) - { - this.name = !string.IsNullOrEmpty(name) ? name : this.name; - this.email = !string.IsNullOrEmpty(email) ? email : this.email; - this.phone = !string.IsNullOrEmpty(phone) ? phone : this.phone; - this.address = !string.IsNullOrEmpty(address) ? address : this.address; - this.receiveUpdates = receiveUpdates ?? 
this.receiveUpdates; - - var missingInformationStringBuilder = new StringBuilder(); - if (string.IsNullOrEmpty(this.name)) - { - missingInformationStringBuilder.AppendLine("Name is missing."); - } - - if (string.IsNullOrEmpty(this.email)) - { - missingInformationStringBuilder.AppendLine("Email is missing."); - } - - if (string.IsNullOrEmpty(this.phone)) - { - missingInformationStringBuilder.AppendLine("Phone is missing."); - } - - if (string.IsNullOrEmpty(this.address)) - { - missingInformationStringBuilder.AppendLine("Address is missing."); - } - - if (this.receiveUpdates == null) - { - missingInformationStringBuilder.AppendLine("ReceiveUpdates is missing."); - } - - if (missingInformationStringBuilder.Length > 0) - { - return missingInformationStringBuilder.ToString(); - } - else - { - return "Application information is saved to database."; - } - } - - public static async Task CreateSaveProgressAgent() - { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - - var instance = new TwoAgent_Fill_Application(); - var functionCallConnector = new FunctionCallMiddleware( - functions: [instance.SaveProgressFunctionContract], - functionMap: new Dictionary>> - { - { instance.SaveProgressFunctionContract.Name, instance.SaveProgressWrapper }, - }); - - var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "application", - modelName: gpt3Config.DeploymentName, - systemMessage: """You are a helpful application form assistant who saves progress while user fills application.""") - .RegisterMessageConnector() - .RegisterMiddleware(functionCallConnector) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var lastUserMessage = msgs.Last() ?? throw new Exception("No user message found."); - var prompt = $""" - Save progress according to the most recent information provided by user. - - ```user - {lastUserMessage.GetContent()} - ``` - """; - - return await agent.GenerateReplyAsync([lastUserMessage], option, ct); - - }); - - return chatAgent; - } - - public static async Task CreateAssistantAgent() - { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - - var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: gpt3Config.DeploymentName, - systemMessage: """You create polite prompt to ask user provide missing information""") - .RegisterMessageConnector() - .RegisterPrintMessage(); - - return chatAgent; - } - - public static async Task CreateUserAgent() - { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? 
throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - - var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "user", - modelName: gpt3Config.DeploymentName, - systemMessage: """ - You are a user who is filling an application form. Simply provide the information as requested and answer the questions, don't do anything else. - - here's some personal information about you: - - name: John Doe - - email: 1234567@gmail.com - - phone: 123-456-7890 - - address: 1234 Main St, Redmond, WA 98052 - - want to receive update? true - """) - .RegisterMessageConnector() - .RegisterPrintMessage(); - - return chatAgent; - } - - public static async Task RunAsync() - { - var applicationAgent = await CreateSaveProgressAgent(); - var assistantAgent = await CreateAssistantAgent(); - var userAgent = await CreateUserAgent(); - - var userToApplicationTransition = Transition.Create(userAgent, applicationAgent); - var applicationToAssistantTransition = Transition.Create(applicationAgent, assistantAgent); - var assistantToUserTransition = Transition.Create(assistantAgent, userAgent); - - var workflow = new Graph( - [ - userToApplicationTransition, - applicationToAssistantTransition, - assistantToUserTransition, - ]); - - var groupChat = new GroupChat( - members: [userAgent, applicationAgent, assistantAgent], - workflow: workflow); - - var groupChatManager = new GroupChatManager(groupChat); - var initialMessage = await assistantAgent.SendAsync("Generate a greeting meesage for user and start the conversation by asking what's their name."); - - var chatHistory = new List { initialMessage }; - await foreach (var msg in userAgent.SendAsync(groupChatManager, chatHistory, maxRound: 30)) - { - if (msg.GetContent().ToLower().Contains("application information is saved to database.") is true) - { - break; - } - } - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example13_OpenAIAgent_JsonMode.cs b/dotnet/sample/AutoGen.BasicSamples/Example13_OpenAIAgent_JsonMode.cs deleted file mode 100644 index 00deded3c0..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example13_OpenAIAgent_JsonMode.cs +++ /dev/null @@ -1,11 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example13_OpenAIAgent_JsonMode.cs - -// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs - diff --git a/dotnet/sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs b/dotnet/sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs deleted file mode 100644 index bf875db3e9..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs +++ /dev/null @@ -1,71 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example14_MistralClientAgent_TokenCount.cs - -#region using_statements -using AutoGen.Core; -using AutoGen.Mistral; -#endregion using_statements -using FluentAssertions; - -namespace AutoGen.BasicSample; - -public class Example14_MistralClientAgent_TokenCount -{ - #region token_counter_middleware - public class MistralAITokenCounterMiddleware : IMiddleware - { - private readonly List responses = new List(); - public string? Name => nameof(MistralAITokenCounterMiddleware); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var reply = await agent.GenerateReplyAsync(context.Messages, context.Options, cancellationToken); - - if (reply is IMessage message) - { - responses.Add(message.Content); - } - - return reply; - } - - public int GetCompletionTokenCount() - { - return responses.Sum(r => r.Usage.CompletionTokens); - } - } - #endregion token_counter_middleware - - public static async Task RunAsync() - { - #region create_mistral_client_agent - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new Exception("Missing MISTRAL_API_KEY environment variable."); - var mistralClient = new MistralClient(apiKey); - var agent = new MistralClientAgent( - client: mistralClient, - name: "assistant", - model: MistralAIModelID.OPEN_MISTRAL_7B); - #endregion create_mistral_client_agent - - #region register_middleware - var tokenCounterMiddleware = new MistralAITokenCounterMiddleware(); - var mistralMessageConnector = new MistralChatMessageConnector(); - var agentWithTokenCounter = agent - .RegisterMiddleware(tokenCounterMiddleware) - .RegisterMiddleware(mistralMessageConnector) - .RegisterPrintMessage(); - #endregion register_middleware - - #region chat_with_agent - await agentWithTokenCounter.SendAsync("write a long, tedious story"); - Console.WriteLine($"Completion token count: {tokenCounterMiddleware.GetCompletionTokenCount()}"); - tokenCounterMiddleware.GetCompletionTokenCount().Should().BeGreaterThan(0); - #endregion chat_with_agent - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs b/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs deleted file mode 100644 index a45cd49025..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs +++ /dev/null @@ -1,71 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example15_GPT4V_BinaryDataImageMessage.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; - -namespace AutoGen.BasicSample; - -/// -/// This example shows usage of ImageMessage. The image is loaded as BinaryData and sent to GPT-4V -///
-/// <remarks>
-/// Add additional images to the ImageResources to load and send more images to GPT-4V -///
-public static class Example15_GPT4V_BinaryDataImageMessage -{ - private static readonly string ImageResourcePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "resource", "images"); - - private static Dictionary _mediaTypeMappings = new() - { - { ".png", "image/png" }, - { ".jpeg", "image/jpeg" }, - { ".jpg", "image/jpeg" }, - { ".gif", "image/gif" }, - { ".webp", "image/webp" } - }; - - public static async Task RunAsync() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var openAiConfig = new OpenAIConfig(openAIKey, "gpt-4o"); - - var visionAgent = new GPTAgent( - name: "gpt", - systemMessage: "You are a helpful AI assistant", - config: openAiConfig, - temperature: 0) - .RegisterPrintMessage(); - - List messages = - [new TextMessage(Role.User, "What is this image?", from: "user")]; - AddMessagesFromResource(ImageResourcePath, messages); - - var multiModalMessage = new MultiModalMessage(Role.User, messages, from: "user"); - var response = await visionAgent.SendAsync(multiModalMessage); - } - - private static void AddMessagesFromResource(string imageResourcePath, List messages) - { - foreach (string file in Directory.GetFiles(imageResourcePath)) - { - if (!_mediaTypeMappings.TryGetValue(Path.GetExtension(file).ToLowerInvariant(), out var mediaType)) - { - continue; - } - - using var fs = new FileStream(file, FileMode.Open, FileAccess.Read); - var ms = new MemoryStream(); - fs.CopyTo(ms); - ms.Seek(0, SeekOrigin.Begin); - var imageData = BinaryData.FromStream(ms, mediaType); - messages.Add(new ImageMessage(Role.Assistant, imageData, from: "user")); - } - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs b/dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs deleted file mode 100644 index 8173fe1aa9..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs - -// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs diff --git a/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs deleted file mode 100644 index 4412510a39..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs +++ /dev/null @@ -1,193 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Example17_ReActAgent.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.BasicSample; - -public class OpenAIReActAgent : IAgent -{ - private readonly OpenAIClient _client; - private readonly string modelName = "gpt-3.5-turbo"; - private readonly FunctionContract[] tools; - private readonly Dictionary>> toolExecutors = new(); - private readonly IAgent reasoner; - private readonly IAgent actor; - private readonly IAgent helper; - private readonly int maxSteps = 10; - - private const string ReActPrompt = @"Answer the following questions as best you can. -You can invoke the following tools: -{tools} - -Use the following format: - -Question: the input question you must answer -Thought: you should always think about what to do -Tool: the tool to invoke -Tool Input: the input to the tool -Observation: the invoke result of the tool -... (this process can repeat multiple times) - -Once you have the final answer, provide the final answer in the following format: -Thought: I now know the final answer -Final Answer: the final answer to the original input question - -Begin! -Question: {input}"; - - public OpenAIReActAgent(OpenAIClient client, string modelName, string name, FunctionContract[] tools, Dictionary>> toolExecutors) - { - _client = client; - this.Name = name; - this.modelName = modelName; - this.tools = tools; - this.toolExecutors = toolExecutors; - this.reasoner = CreateReasoner(); - this.actor = CreateActor(); - this.helper = new OpenAIChatAgent(client, "helper", modelName) - .RegisterMessageConnector(); - } - - public string Name { get; } - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - // step 1: extract the input question - var userQuestion = await helper.SendAsync("Extract the question from chat history", chatHistory: messages); - if (userQuestion.GetContent() is not string question) - { - return new TextMessage(Role.Assistant, "I couldn't find a question in the chat history. Please ask a question.", from: Name); - } - var reactPrompt = CreateReActPrompt(question); - var promptMessage = new TextMessage(Role.User, reactPrompt); - var chatHistory = new List() { promptMessage }; - - // step 2: ReAct - for (int i = 0; i != this.maxSteps; i++) - { - // reasoning - var reasoning = await reasoner.SendAsync(chatHistory: chatHistory); - if (reasoning.GetContent() is not string reasoningContent) - { - return new TextMessage(Role.Assistant, "I couldn't find a reasoning in the chat history. 
Please provide a reasoning.", from: Name); - } - if (reasoningContent.Contains("I now know the final answer")) - { - return new TextMessage(Role.Assistant, reasoningContent, from: Name); - } - - chatHistory.Add(reasoning); - - // action - var action = await actor.SendAsync(reasoning); - chatHistory.Add(action); - } - - // fail to find the final answer - // return the summary of the chat history - var summary = await helper.SendAsync("Summarize the chat history and find out what's missing", chatHistory: chatHistory); - summary.From = Name; - - return summary; - } - - private string CreateReActPrompt(string input) - { - var toolPrompt = tools.Select(t => $"{t.Name}: {t.Description}").Aggregate((a, b) => $"{a}\n{b}"); - var prompt = ReActPrompt.Replace("{tools}", toolPrompt); - prompt = prompt.Replace("{input}", input); - return prompt; - } - - private IAgent CreateReasoner() - { - return new OpenAIChatAgent( - openAIClient: _client, - modelName: modelName, - name: "reasoner") - .RegisterMessageConnector() - .RegisterPrintMessage(); - } - - private IAgent CreateActor() - { - var functionCallMiddleware = new FunctionCallMiddleware(tools, toolExecutors); - return new OpenAIChatAgent( - openAIClient: _client, - modelName: modelName, - name: "actor") - .RegisterMessageConnector() - .RegisterMiddleware(functionCallMiddleware) - .RegisterPrintMessage(); - } -} - -public partial class Tools -{ - /// - /// Get weather report for a specific place on a specific date - /// - /// city - /// date as DD/MM/YYYY - [Function] - public async Task WeatherReport(string city, string date) - { - return $"Weather report for {city} on {date} is sunny"; - } - - /// - /// Get current localization - /// - [Function] - public async Task GetLocalization(string dummy) - { - return $"Paris"; - } - - /// - /// Get current date as DD/MM/YYYY - /// - [Function] - public async Task GetDateToday(string dummy) - { - return $"27/05/2024"; - } -} - -public class Example17_ReActAgent -{ - public static async Task RunAsync() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelName = "gpt-4-turbo"; - var tools = new Tools(); - var openAIClient = new OpenAIClient(openAIKey); - var reactAgent = new OpenAIReActAgent( - client: openAIClient, - modelName: modelName, - name: "react-agent", - tools: [tools.GetLocalizationFunctionContract, tools.GetDateTodayFunctionContract, tools.WeatherReportFunctionContract], - toolExecutors: new Dictionary>> - { - { tools.GetLocalizationFunctionContract.Name, tools.GetLocalizationWrapper }, - { tools.GetDateTodayFunctionContract.Name, tools.GetDateTodayWrapper }, - { tools.WeatherReportFunctionContract.Name, tools.WeatherReportWrapper }, - } - ) - .RegisterPrintMessage(); - - var message = new TextMessage(Role.User, "What is the weather here", from: "user"); - - var response = await reactAgent.SendAsync(message); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs deleted file mode 100644 index e92489c718..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs +++ /dev/null @@ -1,86 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Agent_Middleware.cs - -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion Using -using FluentAssertions; - -namespace AutoGen.BasicSample; - -public class Agent_Middleware -{ - public static async Task RunTokenCountAsync() - { - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY"); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var openaiMessageConnector = new OpenAIChatRequestMessageConnector(); - var totalTokenCount = 0; - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMiddleware(async (messages, option, innerAgent, ct) => - { - var reply = await innerAgent.GenerateReplyAsync(messages, option, ct); - if (reply is MessageEnvelope chatCompletions) - { - var tokenCount = chatCompletions.Content.Usage.TotalTokens; - totalTokenCount += tokenCount; - } - return reply; - }) - .RegisterMiddleware(openaiMessageConnector); - #endregion Create_Agent - - #region Chat_With_Agent - var reply = await agent.SendAsync("Tell me a joke"); - Console.WriteLine($"Total token count: {totalTokenCount}"); - #endregion Chat_With_Agent - - #region verify_reply - reply.Should().BeOfType(); - totalTokenCount.Should().BeGreaterThan(0); - #endregion verify_reply - } - - public static async Task RunRagTaskAsync() - { - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY"); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var openaiMessageConnector = new OpenAIChatRequestMessageConnector(); - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() - .RegisterMiddleware(async (messages, option, innerAgent, ct) => - { - var today = DateTime.UtcNow; - var todayMessage = new TextMessage(Role.System, $"Today is {today:yyyy-MM-dd}"); - messages = messages.Concat(new[] { todayMessage }); - return await innerAgent.GenerateReplyAsync(messages, option, ct); - }) - .RegisterPrintMessage(); - #endregion Create_Agent - - #region Chat_With_Agent - var reply = await agent.SendAsync("what's the date today"); - #endregion Chat_With_Agent - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs deleted file mode 100644 index 18da596d6d..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs +++ /dev/null @@ -1,65 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Chat_With_Agent.cs - -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion Using - -using FluentAssertions; - -namespace AutoGen.BasicSample; - -public class Chat_With_Agent -{ - public static async Task RunAsync() - { - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector(); // convert OpenAI message to AutoGen message - #endregion Create_Agent - - #region Chat_With_Agent - var reply = await agent.SendAsync("Tell me a joke"); - reply.Should().BeOfType(); - if (reply is TextMessage textMessage) - { - Console.WriteLine(textMessage.Content); - } - #endregion Chat_With_Agent - - #region Chat_With_History - reply = await agent.SendAsync("summarize the conversation", chatHistory: [reply]); - #endregion Chat_With_History - - #region Streaming_Chat - var question = new TextMessage(Role.User, "Tell me a long joke"); - await foreach (var streamingReply in agent.GenerateStreamingReplyAsync([question])) - { - if (streamingReply is TextMessageUpdate textMessageUpdate) - { - Console.WriteLine(textMessageUpdate.Content); - } - } - #endregion Streaming_Chat - - #region verify_reply - reply.Should().BeOfType(); - #endregion verify_reply - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs deleted file mode 100644 index a82cc33f02..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs +++ /dev/null @@ -1,97 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Dynamic_Group_Chat.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using AutoGen.SemanticKernel; -using AutoGen.SemanticKernel.Extension; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel; - -namespace AutoGen.BasicSample; - -public class Dynamic_Group_Chat -{ - public static async Task RunAsync() - { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - - #region Create_Coder - var openaiClient = new OpenAIClient(apiKey); - var coder = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "coder", - modelName: model, - systemMessage: "You are a C# coder, when writing csharp code, please put the code between ```csharp and ```") - .RegisterMessageConnector() // convert OpenAI message to AutoGen message - .RegisterPrintMessage(); // print the message content - #endregion Create_Coder - - #region Create_Commenter - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion(modelId: model, apiKey: apiKey) - .Build(); - var commenter = new SemanticKernelAgent( - kernel: kernel, - name: "commenter", - systemMessage: "You write inline comments for the code snippet and add unit tests if necessary") - .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage. - .RegisterPrintMessage(); // pretty print the message to the console - #endregion Create_Commenter - - #region Create_UserProxy - var userProxy = new DefaultReplyAgent("user", defaultReply: "END") - .RegisterPrintMessage(); // print the message content - #endregion Create_UserProxy - - #region Create_Group - var admin = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "admin", - modelName: model) - .RegisterMessageConnector(); // convert OpenAI message to AutoGen message - - var group = new GroupChat( - members: [coder, commenter, userProxy], - admin: admin); - #endregion Create_Group - - #region Chat_With_Group - var workflowInstruction = new TextMessage( - Role.User, - """ - Here is the workflow of this group chat: - User{Ask a question} -> Coder{Write code} - Coder{Write code} -> Commenter{Add comments to the code} - Commenter{Add comments to the code} -> User{END} - """); - - var question = new TextMessage(Role.User, "How to calculate the 100th Fibonacci number?"); - var chatHistory = new List { workflowInstruction, question }; - while (true) - { - var replies = await group.CallAsync(chatHistory, maxRound: 1); - var lastReply = replies.Last(); - chatHistory.Add(lastReply); - - if (lastReply.From == userProxy.Name) - { - break; - } - } - #endregion Chat_With_Group - - #region Summarize_Chat_History - var summary = await coder.SendAsync("summarize the conversation", chatHistory: chatHistory); - #endregion Summarize_Chat_History - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs deleted file mode 100644 index 4dc12d3cb3..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs +++ /dev/null @@ -1,196 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FSM_Group_Chat.cs - -using System.Text; -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion Using - -namespace AutoGen.BasicSample; - -#region FillFormTool -public partial class FillFormTool -{ - private string? name = null; - private string? 
email = null; - private string? phone = null; - private string? address = null; - private bool? receiveUpdates = null; - - [Function] - public async Task SaveProgress( - string name, - string email, - string phone, - string address, - bool? receiveUpdates) - { - this.name = !string.IsNullOrEmpty(name) ? name : this.name; - this.email = !string.IsNullOrEmpty(email) ? email : this.email; - this.phone = !string.IsNullOrEmpty(phone) ? phone : this.phone; - this.address = !string.IsNullOrEmpty(address) ? address : this.address; - this.receiveUpdates = receiveUpdates ?? this.receiveUpdates; - - var missingInformationStringBuilder = new StringBuilder(); - if (string.IsNullOrEmpty(this.name)) - { - missingInformationStringBuilder.AppendLine("Name is missing."); - } - - if (string.IsNullOrEmpty(this.email)) - { - missingInformationStringBuilder.AppendLine("Email is missing."); - } - - if (string.IsNullOrEmpty(this.phone)) - { - missingInformationStringBuilder.AppendLine("Phone is missing."); - } - - if (string.IsNullOrEmpty(this.address)) - { - missingInformationStringBuilder.AppendLine("Address is missing."); - } - - if (this.receiveUpdates == null) - { - missingInformationStringBuilder.AppendLine("ReceiveUpdates is missing."); - } - - if (missingInformationStringBuilder.Length > 0) - { - return missingInformationStringBuilder.ToString(); - } - else - { - return "Application information is saved to database."; - } - } -} -#endregion FillFormTool - -public class FSM_Group_Chat -{ - public static async Task CreateSaveProgressAgent(OpenAIClient client, string model) - { - #region Create_Save_Progress_Agent - var tool = new FillFormTool(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [tool.SaveProgressFunctionContract], - functionMap: new Dictionary>> - { - { tool.SaveProgressFunctionContract.Name!, tool.SaveProgressWrapper }, - }); - - var chatAgent = new OpenAIChatAgent( - openAIClient: client, - name: "application", - modelName: model, - systemMessage: """You are a helpful application form assistant who saves progress while user fills application.""") - .RegisterMessageConnector() - .RegisterMiddleware(functionCallMiddleware) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var lastUserMessage = msgs.Last() ?? throw new Exception("No user message found."); - var prompt = $""" - Save progress according to the most recent information provided by user. - - ```user - {lastUserMessage.GetContent()} - ``` - """; - - return await agent.GenerateReplyAsync([lastUserMessage], option, ct); - - }); - #endregion Create_Save_Progress_Agent - - return chatAgent; - } - - public static async Task CreateAssistantAgent(OpenAIClient openaiClient, string model) - { - #region Create_Assistant_Agent - var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: model, - systemMessage: """You create polite prompt to ask user provide missing information""") - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_Assistant_Agent - return chatAgent; - } - - public static async Task CreateUserAgent(OpenAIClient openaiClient, string model) - { - #region Create_User_Agent - var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "user", - modelName: model, - systemMessage: """ - You are a user who is filling an application form. Simply provide the information as requested and answer the questions, don't do anything else. 
- - here's some personal information about you: - - name: John Doe - - email: 1234567@gmail.com - - phone: 123-456-7890 - - address: 1234 Main St, Redmond, WA 98052 - - want to receive update? true - """) - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_User_Agent - return chatAgent; - } - - public static async Task RunAsync() - { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var applicationAgent = await CreateSaveProgressAgent(openaiClient, model); - var assistantAgent = await CreateAssistantAgent(openaiClient, model); - var userAgent = await CreateUserAgent(openaiClient, model); - - #region Create_Graph - var userToApplicationTransition = Transition.Create(userAgent, applicationAgent); - var applicationToAssistantTransition = Transition.Create(applicationAgent, assistantAgent); - var assistantToUserTransition = Transition.Create(assistantAgent, userAgent); - - var workflow = new Graph( - [ - userToApplicationTransition, - applicationToAssistantTransition, - assistantToUserTransition, - ]); - #endregion Create_Graph - - #region Group_Chat - var groupChat = new GroupChat( - members: [userAgent, applicationAgent, assistantAgent], - workflow: workflow); - #endregion Group_Chat - - var initialMessage = await assistantAgent.SendAsync("Generate a greeting meesage for user and start the conversation by asking what's their name."); - - var chatHistory = new List { initialMessage }; - await foreach (var msg in groupChat.SendAsync(chatHistory, maxRound: 30)) - { - if (msg.GetContent().ToLower().Contains("application information is saved to database.") is true) - { - break; - } - } - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs deleted file mode 100644 index c24c536c9b..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Image_Chat_With_Agent.cs - -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion Using -using FluentAssertions; - -namespace AutoGen.BasicSample; - -public class Image_Chat_With_Agent -{ - public static async Task RunAsync() - { - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-4o"; // The model needs to support multimodal inputs - var openaiClient = new OpenAIClient(apiKey); - - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() // convert OpenAI message to AutoGen message - .RegisterPrintMessage(); - #endregion Create_Agent - - #region Prepare_Image_Input - var backgoundImagePath = Path.Combine("resource", "images", "background.png"); - var imageBytes = File.ReadAllBytes(backgoundImagePath); - var imageMessage = new ImageMessage(Role.User, BinaryData.FromBytes(imageBytes, "image/png")); - #endregion Prepare_Image_Input - - #region Prepare_Multimodal_Input - var textMessage = new TextMessage(Role.User, "what's in the picture"); - var multimodalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]); - #endregion Prepare_Multimodal_Input - - #region Chat_With_Agent - var reply = await agent.SendAsync("what's in the picture", chatHistory: [imageMessage]); - // or use multimodal message to generate reply - reply = await agent.SendAsync(multimodalMessage); - #endregion Chat_With_Agent - - #region verify_reply - reply.Should().BeOfType(); - #endregion verify_reply - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs deleted file mode 100644 index 653d7dc192..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs +++ /dev/null @@ -1,62 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Streaming_Tool_Call.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using FluentAssertions; - -namespace AutoGen.BasicSample.GettingStart; - -internal class Streaming_Tool_Call -{ - public static async Task RunAsync() - { - #region Create_tools - var tools = new Tools(); - #endregion Create_tools - - #region Create_auto_invoke_middleware - var autoInvokeMiddleware = new FunctionCallMiddleware( - functions: [tools.GetWeatherFunctionContract], - functionMap: new Dictionary>>() - { - { tools.GetWeatherFunctionContract.Name, tools.GetWeatherWrapper }, - }); - #endregion Create_auto_invoke_middleware - - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-4o"; - var openaiClient = new OpenAIClient(apiKey); - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() - .RegisterStreamingMiddleware(autoInvokeMiddleware) - .RegisterPrintMessage(); - #endregion Create_Agent - - IMessage finalReply = null; - var question = new TextMessage(Role.User, "What's the weather in Seattle"); - - // In streaming function call - // function can only be invoked untill all the chunks are collected - // therefore, only one ToolCallAggregateMessage chunk will be return here. - await foreach (var message in agent.GenerateStreamingReplyAsync([question])) - { - finalReply = message; - } - - finalReply?.GetContent().Should().Be("The weather in Seattle is sunny."); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs deleted file mode 100644 index a971971e0a..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs +++ /dev/null @@ -1,113 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Use_Tools_With_Agent.cs - -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -#endregion Using -using FluentAssertions; - -namespace AutoGen.BasicSample; - -#region Tools -public partial class Tools -{ - /// - /// Get the weather of the city. - /// - /// - [Function] - public async Task GetWeather(string city) - { - return $"The weather in {city} is sunny."; - } -} -#endregion Tools - -public class Use_Tools_With_Agent -{ - public static async Task RunAsync() - { - #region Create_tools - var tools = new Tools(); - #endregion Create_tools - - #region Create_auto_invoke_middleware - var autoInvokeMiddleware = new FunctionCallMiddleware( - functions: [tools.GetWeatherFunctionContract], - functionMap: new Dictionary>>() - { - { tools.GetWeatherFunctionContract.Name!, tools.GetWeatherWrapper }, - }); - #endregion Create_auto_invoke_middleware - - #region Create_no_invoke_middleware - var noInvokeMiddleware = new FunctionCallMiddleware( - functions: [tools.GetWeatherFunctionContract]); - #endregion Create_no_invoke_middleware - - #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var agent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "agent", - modelName: model, - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector(); // convert OpenAI message to AutoGen message - #endregion Create_Agent - - #region Single_Turn_Auto_Invoke - var autoInvokeAgent = agent - .RegisterMiddleware(autoInvokeMiddleware) // pass function definition to agent. 
- .RegisterPrintMessage(); // print the message content - var question = new TextMessage(Role.User, "What is the weather in Seattle?"); - var reply = await autoInvokeAgent.SendAsync(question); - reply.Should().BeOfType(); - #endregion Single_Turn_Auto_Invoke - - #region Single_Turn_No_Invoke - var noInvokeAgent = agent - .RegisterMiddleware(noInvokeMiddleware) // pass function definition to agent. - .RegisterPrintMessage(); // print the message content - - question = new TextMessage(Role.User, "What is the weather in Seattle?"); - reply = await noInvokeAgent.SendAsync(question); - reply.Should().BeOfType(); - #endregion Single_Turn_No_Invoke - - #region Multi_Turn_Tool_Call - var finalReply = await agent.SendAsync(chatHistory: [question, reply]); - #endregion Multi_Turn_Tool_Call - - #region verify_reply - finalReply.Should().BeOfType(); - #endregion verify_reply - - #region parallel_tool_call - question = new TextMessage(Role.User, "What is the weather in Seattle, New York and Vancouver"); - reply = await agent.SendAsync(question); - #endregion parallel_tool_call - - #region verify_parallel_tool_call_reply - reply.Should().BeOfType(); - (reply as ToolCallAggregateMessage)!.Message1.ToolCalls.Count().Should().Be(3); - #endregion verify_parallel_tool_call_reply - - #region Multi_Turn_Parallel_Tool_Call - finalReply = await agent.SendAsync(chatHistory: [question, reply]); - finalReply.Should().BeOfType(); - (finalReply as ToolCallAggregateMessage)!.Message1.ToolCalls.Count().Should().Be(3); - #endregion Multi_Turn_Parallel_Tool_Call - } - -} diff --git a/dotnet/sample/AutoGen.BasicSamples/GlobalUsing.cs b/dotnet/sample/AutoGen.BasicSamples/GlobalUsing.cs deleted file mode 100644 index a2db22494d..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/GlobalUsing.cs +++ /dev/null @@ -1,9 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - diff --git a/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs b/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs deleted file mode 100644 index 046535f6fa..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs +++ /dev/null @@ -1,46 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// LLMConfiguration.cs - -using AutoGen.OpenAI.V1; - -namespace AutoGen.BasicSample; - -internal static class LLMConfiguration -{ - public static OpenAIConfig GetOpenAIGPT3_5_Turbo() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - return new OpenAIConfig(openAIKey, modelId); - } - - public static OpenAIConfig GetOpenAIGPT4() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-4"; - - return new OpenAIConfig(openAIKey, modelId); - } - - public static AzureOpenAIConfig GetAzureOpenAIGPT3_5_Turbo(string? deployName = null) - { - var azureOpenAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - deployName = deployName ?? Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - return new AzureOpenAIConfig(endpoint, deployName, azureOpenAIKey); - } - - public static AzureOpenAIConfig GetAzureOpenAIGPT4(string deployName = "gpt-4") - { - var azureOpenAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - - return new AzureOpenAIConfig(endpoint, deployName, azureOpenAIKey); - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Program.cs b/dotnet/sample/AutoGen.BasicSamples/Program.cs deleted file mode 100644 index 8cda873c8c..0000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Program.cs +++ /dev/null @@ -1,65 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Program.cs - -//await Example07_Dynamic_GroupChat_Calculate_Fibonacci.RunAsync(); - -using AutoGen.BasicSample; - -//Define allSamples collection for all examples -List>> allSamples = new List>>(); - -// When a new sample is created please add them to the allSamples collection -allSamples.Add(new Tuple>("Assistant Agent", async () => { await Example01_AssistantAgent.RunAsync(); })); -allSamples.Add(new Tuple>("Two-agent Math Chat", async () => { await Example02_TwoAgent_MathChat.RunAsync(); })); -allSamples.Add(new Tuple>("Agent Function Call", async () => { await Example03_Agent_FunctionCall.RunAsync(); })); -allSamples.Add(new Tuple>("Dynamic Group Chat Coding Task", async () => { await Example04_Dynamic_GroupChat_Coding_Task.RunAsync(); })); -allSamples.Add(new Tuple>("DALL-E and GPT4v", async () => { await Example05_Dalle_And_GPT4V.RunAsync(); })); -allSamples.Add(new Tuple>("User Proxy Agent", async () => { await Example06_UserProxyAgent.RunAsync(); })); -allSamples.Add(new Tuple>("Dynamic Group Chat - Calculate Fibonacci", async () => { await Example07_Dynamic_GroupChat_Calculate_Fibonacci.RunAsync(); })); -allSamples.Add(new Tuple>("LM Studio", async () => { await Example08_LMStudio.RunAsync(); })); -allSamples.Add(new Tuple>("LM Studio - Function Call", async () => { await Example09_LMStudio_FunctionCall.RunAsync(); })); -allSamples.Add(new Tuple>("Semantic Kernel", async () => { await Example10_SemanticKernel.RunAsync(); })); -allSamples.Add(new Tuple>("Sequential Group Chat", async () => { await Sequential_GroupChat_Example.RunAsync(); })); -allSamples.Add(new Tuple>("Two Agent - Fill Application", async () => { await TwoAgent_Fill_Application.RunAsync(); })); -allSamples.Add(new Tuple>("Mistal Client Agent - Token Count", async () => { await Example14_MistralClientAgent_TokenCount.RunAsync(); })); -allSamples.Add(new Tuple>("GPT4v - Binary Data Image", async () => { await Example15_GPT4V_BinaryDataImageMessage.RunAsync(); })); -allSamples.Add(new Tuple>("ReAct Agent", async () => { await Example17_ReActAgent.RunAsync(); })); - - -int idx = 1; -Dictionary>> map = new Dictionary>>(); -Console.WriteLine("Available Examples:\n\n"); -foreach (Tuple> sample in allSamples) -{ - map.Add(idx, sample); - Console.WriteLine("{0}. 
{1}", idx++, sample.Item1); -} - -Console.WriteLine("\n\nEnter your selection:"); - -try -{ - int val = Convert.ToInt32(Console.ReadLine()); - - if (!map.ContainsKey(val)) - { - Console.WriteLine("Invalid choice"); - } - else - { - Console.WriteLine("\nRunning {0}", map[val].Item1); - await map[val].Item2.Invoke(); - } -} -catch -{ - Console.WriteLine("Error encountered, please check your entry and run again"); -} - - diff --git a/dotnet/sample/AutoGen.Gemini.Sample/AutoGen.Gemini.Sample.csproj b/dotnet/sample/AutoGen.Gemini.Sample/AutoGen.Gemini.Sample.csproj deleted file mode 100644 index d1df8a8ed1..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/AutoGen.Gemini.Sample.csproj +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ - - - Exe - $(TestTargetFrameworks) - enable - enable - true - True - - - - - - - - - - diff --git a/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs b/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs deleted file mode 100644 index b018c135b5..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs +++ /dev/null @@ -1,47 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Chat_With_Google_Gemini.cs - -#region Using -using AutoGen.Core; -#endregion Using -using FluentAssertions; - -namespace AutoGen.Gemini.Sample; - -public class Chat_With_Google_Gemini -{ - public static async Task RunAsync() - { - #region Create_Gemini_Agent - var apiKey = Environment.GetEnvironmentVariable("GOOGLE_GEMINI_API_KEY"); - - if (apiKey is null) - { - Console.WriteLine("Please set GOOGLE_GEMINI_API_KEY environment variable."); - return; - } - - var geminiAgent = new GeminiChatAgent( - name: "gemini", - model: "gemini-1.5-flash-001", - apiKey: apiKey, - systemMessage: "You are a helpful C# engineer, put your code between ```csharp and ```, don't explain the code") - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_Gemini_Agent - - #region Chat_With_Google_Gemini - var reply = await geminiAgent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); - #endregion Chat_With_Google_Gemini - - #region verify_reply - reply.Should().BeOfType(); - #endregion verify_reply - } -} diff --git a/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs b/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs deleted file mode 100644 index bd57787aa2..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs +++ /dev/null @@ -1,48 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Chat_With_Vertex_Gemini.cs - -#region Using -using AutoGen.Core; -#endregion Using -using FluentAssertions; - -namespace AutoGen.Gemini.Sample; - -public class Chat_With_Vertex_Gemini -{ - public static async Task RunAsync() - { - #region Create_Gemini_Agent - var projectID = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - - if (projectID is null) - { - Console.WriteLine("Please set GCP_VERTEX_PROJECT_ID environment variable."); - return; - } - - var geminiAgent = new GeminiChatAgent( - name: "gemini", - model: "gemini-1.5-flash-001", - location: "us-east1", - project: projectID, - systemMessage: "You are a helpful C# engineer, put your code between ```csharp and ```, don't explain the code") - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_Gemini_Agent - - #region Chat_With_Vertex_Gemini - var reply = await geminiAgent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); - #endregion Chat_With_Vertex_Gemini - - #region verify_reply - reply.Should().BeOfType(); - #endregion verify_reply - } -} diff --git a/dotnet/sample/AutoGen.Gemini.Sample/Function_Call_With_Gemini.cs b/dotnet/sample/AutoGen.Gemini.Sample/Function_Call_With_Gemini.cs deleted file mode 100644 index d02a5b3a31..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/Function_Call_With_Gemini.cs +++ /dev/null @@ -1,137 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Function_Call_With_Gemini.cs - -#region Using -using AutoGen.Core; -using Google.Cloud.AIPlatform.V1; -#endregion Using -using FluentAssertions; - -namespace AutoGen.Gemini.Sample; - -#region MovieFunction -public partial class MovieFunction -{ - /// - /// find movie titles currently playing in theaters based on any description, genre, title words, etc. - /// - /// The city and state, e.g. San Francisco, CA or a zip code e.g. 95616 - /// Any kind of description including category or genre, title words, attributes, etc. - /// - [Function] - public async Task FindMovies(string location, string description) - { - // dummy implementation - var movies = new List { "Barbie", "Spiderman", "Batman" }; - var result = $"Movies playing in {location} based on {description} are: {string.Join(", ", movies)}"; - - return result; - } - - /// - /// find theaters based on location and optionally movie title which is currently playing in theaters - /// - /// The city and state, e.g. San Francisco, CA or a zip code e.g. 95616 - /// Any movie title - [Function] - public async Task FindTheaters(string location, string movie) - { - // dummy implementation - var theaters = new List { "AMC", "Regal", "Cinemark" }; - var result = $"Theaters playing {movie} in {location} are: {string.Join(", ", theaters)}"; - - return result; - } - - /// - /// Find the start times for movies playing in a specific theater - /// - /// The city and state, e.g. San Francisco, CA or a zip code e.g. 
95616 - /// Any movie title - /// Name of the theater - /// Date for requested showtime - /// - [Function] - public async Task GetShowtimes(string location, string movie, string theater, string date) - { - // dummy implementation - var showtimes = new List { "10:00 AM", "12:00 PM", "2:00 PM", "4:00 PM", "6:00 PM", "8:00 PM" }; - var result = $"Showtimes for {movie} at {theater} in {location} are: {string.Join(", ", showtimes)}"; - - return result; - } -} -#endregion MovieFunction - -/// -/// Modified from https://ai.google.dev/gemini-api/docs/function-calling -/// -public partial class Function_Call_With_Gemini -{ - public static async Task RunAsync() - { - #region Create_Gemini_Agent - var projectID = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - - if (projectID is null) - { - Console.WriteLine("Please set GCP_VERTEX_PROJECT_ID environment variable."); - return; - } - - var movieFunction = new MovieFunction(); - var functionMiddleware = new FunctionCallMiddleware( - functions: [ - movieFunction.FindMoviesFunctionContract, - movieFunction.FindTheatersFunctionContract, - movieFunction.GetShowtimesFunctionContract - ], - functionMap: new Dictionary>> - { - { movieFunction.FindMoviesFunctionContract.Name!, movieFunction.FindMoviesWrapper }, - { movieFunction.FindTheatersFunctionContract.Name!, movieFunction.FindTheatersWrapper }, - { movieFunction.GetShowtimesFunctionContract.Name!, movieFunction.GetShowtimesWrapper }, - }); - - var geminiAgent = new GeminiChatAgent( - name: "gemini", - model: "gemini-1.5-flash-001", - location: "us-central1", - project: projectID, - systemMessage: "You are a helpful AI assistant", - toolConfig: new ToolConfig() - { - FunctionCallingConfig = new FunctionCallingConfig() - { - Mode = FunctionCallingConfig.Types.Mode.Auto, - } - }) - .RegisterMessageConnector() - .RegisterPrintMessage() - .RegisterStreamingMiddleware(functionMiddleware); - #endregion Create_Gemini_Agent - - #region Single_turn - var question = new TextMessage(Role.User, "What movies are showing in North Seattle tonight?"); - var functionCallReply = await geminiAgent.SendAsync(question); - #endregion Single_turn - - #region Single_turn_verify_reply - functionCallReply.Should().BeOfType(); - #endregion Single_turn_verify_reply - - #region Multi_turn - var finalReply = await geminiAgent.SendAsync(chatHistory: [question, functionCallReply]); - #endregion Multi_turn - - #region Multi_turn_verify_reply - finalReply.Should().BeOfType(); - #endregion Multi_turn_verify_reply - } -} diff --git a/dotnet/sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs b/dotnet/sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs deleted file mode 100644 index 4123779fed..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs +++ /dev/null @@ -1,51 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Image_Chat_With_Vertex_Gemini.cs - -#region Using -using AutoGen.Core; -#endregion Using -using FluentAssertions; - -namespace AutoGen.Gemini.Sample; - -public class Image_Chat_With_Vertex_Gemini -{ - public static async Task RunAsync() - { - #region Create_Gemini_Agent - var projectID = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - - if (projectID is null) - { - Console.WriteLine("Please set GCP_VERTEX_PROJECT_ID environment variable."); - return; - } - - var geminiAgent = new GeminiChatAgent( - name: "gemini", - model: "gemini-1.5-flash-001", - location: "us-east4", - project: projectID, - systemMessage: "You explain image content to user") - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_Gemini_Agent - - #region Send_Image_Request - var imagePath = Path.Combine("resource", "images", "background.png"); - var image = await File.ReadAllBytesAsync(imagePath); - var imageMessage = new ImageMessage(Role.User, BinaryData.FromBytes(image, "image/png")); - var reply = await geminiAgent.SendAsync("what's in the image", [imageMessage]); - #endregion Send_Image_Request - - #region Verify_Reply - reply.Should().BeOfType(); - #endregion Verify_Reply - } -} diff --git a/dotnet/sample/AutoGen.Gemini.Sample/Program.cs b/dotnet/sample/AutoGen.Gemini.Sample/Program.cs deleted file mode 100644 index 6dc99af856..0000000000 --- a/dotnet/sample/AutoGen.Gemini.Sample/Program.cs +++ /dev/null @@ -1,12 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -using AutoGen.Gemini.Sample; - -Image_Chat_With_Vertex_Gemini.RunAsync().Wait(); diff --git a/dotnet/sample/AutoGen.Ollama.Sample/AutoGen.Ollama.Sample.csproj b/dotnet/sample/AutoGen.Ollama.Sample/AutoGen.Ollama.Sample.csproj deleted file mode 100644 index 62c9d61633..0000000000 --- a/dotnet/sample/AutoGen.Ollama.Sample/AutoGen.Ollama.Sample.csproj +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ - - Exe - $(TestTargetFrameworks) - enable - True - $(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110 - true - - - - - - - - - - - diff --git a/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs b/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs deleted file mode 100644 index 57f83ff5aa..0000000000 --- a/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs +++ /dev/null @@ -1,38 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Chat_With_LLaMA.cs - -#region Using -using AutoGen.Core; -using AutoGen.Ollama.Extension; -#endregion Using - -namespace AutoGen.Ollama.Sample; - -public class Chat_With_LLaMA -{ - public static async Task RunAsync() - { - #region Create_Ollama_Agent - using var httpClient = new HttpClient() - { - BaseAddress = new Uri("http://localhost:11434"), - }; - - var ollamaAgent = new OllamaAgent( - httpClient: httpClient, - name: "ollama", - modelName: "llama3:latest", - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() - .RegisterPrintMessage(); - - var reply = await ollamaAgent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); - #endregion Create_Ollama_Agent - } -} diff --git a/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs b/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs deleted file mode 100644 index e0a21e4b79..0000000000 --- a/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs +++ /dev/null @@ -1,54 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Chat_With_LLaVA.cs - -#region Using -using AutoGen.Core; -using AutoGen.Ollama.Extension; -#endregion Using - -namespace AutoGen.Ollama.Sample; - -public class Chat_With_LLaVA -{ - public static async Task RunAsync() - { - #region Create_Ollama_Agent - using var httpClient = new HttpClient() - { - BaseAddress = new Uri("http://localhost:11434"), - }; - - var ollamaAgent = new OllamaAgent( - httpClient: httpClient, - name: "ollama", - modelName: "llava:latest", - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion Create_Ollama_Agent - - #region Send_Message - var image = Path.Combine("resource", "images", "background.png"); - var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png"); - var imageMessage = new ImageMessage(Role.User, binaryData); - var textMessage = new TextMessage(Role.User, "what's in this image?"); - var reply = await ollamaAgent.SendAsync(chatHistory: [textMessage, imageMessage]); - #endregion Send_Message - - #region Send_MultiModal_Message - // You can also use MultiModalMessage to put text and image together in one message - // In this case, all the messages in the multi-modal message will be put into single piece of message - // where the text is the concatenation of all the text messages seperated by \n - // and the images are all the images in the multi-modal message - var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]); - - reply = await ollamaAgent.SendAsync(chatHistory: [multiModalMessage]); - #endregion Send_MultiModal_Message - } -} diff --git a/dotnet/sample/AutoGen.Ollama.Sample/Program.cs b/dotnet/sample/AutoGen.Ollama.Sample/Program.cs deleted file mode 100644 index 427cf2bbe2..0000000000 --- a/dotnet/sample/AutoGen.Ollama.Sample/Program.cs +++ /dev/null @@ -1,12 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -using AutoGen.Ollama.Sample; - -await Chat_With_LLaVA.RunAsync(); diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj b/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj deleted file mode 100644 index 49c0e21c9e..0000000000 --- a/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ - - - Exe - $(TestTargetFrameworks) - enable - enable - True - $(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110 - true - - - - - - - - - - - diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs deleted file mode 100644 index 5a9d9fa7b4..0000000000 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs +++ /dev/null @@ -1,69 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Connect_To_Ollama.cs - -#region using_statement -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; -#endregion using_statement - -namespace AutoGen.OpenAI.Sample; - -#region CustomHttpClientHandler -public sealed class CustomHttpClientHandler : HttpClientHandler -{ - private string _modelServiceUrl; - - public CustomHttpClientHandler(string modelServiceUrl) - { - _modelServiceUrl = modelServiceUrl; - } - - protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}"); - - return base.SendAsync(request, cancellationToken); - } -} -#endregion CustomHttpClientHandler - -public class Connect_To_Ollama -{ - public static async Task RunAsync() - { - #region create_agent - using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434")); - var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview) - { - Transport = new HttpClientTransport(client), - }; - - // api-key is not required for local server - // so you can use any string here - var openAIClient = new OpenAIClient("api-key", option); - var model = "llama3"; - - var agent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: model, - systemMessage: "You are a helpful assistant designed to output JSON.", - seed: 0) - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion create_agent - - #region send_message - await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); - #endregion send_message - } -} diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs deleted file mode 100644 index 6a9e0de138..0000000000 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs +++ /dev/null @@ -1,12 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// 
Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -using AutoGen.OpenAI.Sample; - -Tool_Call_With_Ollama_And_LiteLLM.RunAsync().Wait(); diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs deleted file mode 100644 index bdd18492ff..0000000000 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs +++ /dev/null @@ -1,75 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Tool_Call_With_Ollama_And_LiteLLM.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; - -namespace AutoGen.OpenAI.Sample; - -#region Function -public partial class Function -{ - [Function] - public async Task GetWeatherAsync(string city) - { - return await Task.FromResult("The weather in " + city + " is 72 degrees and sunny."); - } -} -#endregion Function - -public class Tool_Call_With_Ollama_And_LiteLLM -{ - public static async Task RunAsync() - { - // Before running this code, make sure you have - // - Ollama: - // - Install dolphincoder:latest in Ollama - // - Ollama running on http://localhost:11434 - // - LiteLLM - // - Install LiteLLM - // - Start LiteLLM with the following command: - // - litellm --model ollama_chat/dolphincoder --port 4000 - - # region Create_tools - var functions = new Function(); - var functionMiddleware = new FunctionCallMiddleware( - functions: [functions.GetWeatherAsyncFunctionContract], - functionMap: new Dictionary>> - { - { functions.GetWeatherAsyncFunctionContract.Name!, functions.GetWeatherAsyncWrapper }, - }); - #endregion Create_tools - #region Create_Agent - var liteLLMUrl = "http://localhost:4000"; - using var httpClient = new HttpClient(new CustomHttpClientHandler(liteLLMUrl)); - var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview) - { - Transport = new HttpClientTransport(httpClient), - }; - - // api-key is not required for local server - // so you can use any string here - var openAIClient = new OpenAIClient("api-key", option); - - var agent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: "dolphincoder:latest", - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() - .RegisterMiddleware(functionMiddleware) - .RegisterPrintMessage(); - - var reply = await agent.SendAsync("what's the weather in new york"); - #endregion Create_Agent - } -} diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs deleted file mode 100644 index f87d79d226..0000000000 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs +++ /dev/null @@ -1,73 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 
-// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Use_Json_Mode.cs - -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using FluentAssertions; - -namespace AutoGen.BasicSample; - -public class Use_Json_Mode -{ - public static async Task RunAsync() - { - #region create_agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - - var openAIClient = new OpenAIClient(apiKey); - var openAIClientAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: model, - systemMessage: "You are a helpful assistant designed to output JSON.", - seed: 0, // explicitly set a seed to enable deterministic output - responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode - .RegisterMessageConnector() - .RegisterPrintMessage(); - #endregion create_agent - - #region chat_with_agent - var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle."); - - var person = JsonSerializer.Deserialize(reply.GetContent()); - Console.WriteLine($"Name: {person.Name}"); - Console.WriteLine($"Age: {person.Age}"); - - if (!string.IsNullOrEmpty(person.Address)) - { - Console.WriteLine($"Address: {person.Address}"); - } - - Console.WriteLine("Done."); - #endregion chat_with_agent - - person.Name.Should().Be("John"); - person.Age.Should().Be(25); - person.Address.Should().BeNullOrEmpty(); - } -} - -#region person_class -public class Person -{ - [JsonPropertyName("name")] - public string Name { get; set; } - - [JsonPropertyName("age")] - public int Age { get; set; } - - [JsonPropertyName("address")] - public string Address { get; set; } -} -#endregion person_class diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj b/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj deleted file mode 100644 index df1064e18c..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj +++ /dev/null @@ -1,17 +0,0 @@ -ο»Ώ - - - Exe - $(TestTargetFrameworks) - True - $(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110 - enable - - - - - - - - - diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Agent.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Agent.cs deleted file mode 100644 index 627e8ac838..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Agent.cs +++ /dev/null @@ -1,35 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Create_Semantic_Kernel_Agent.cs - -using AutoGen.Core; -using AutoGen.SemanticKernel.Extension; -using Microsoft.SemanticKernel; - -namespace AutoGen.SemanticKernel.Sample; - -public class Create_Semantic_Kernel_Agent -{ - public static async Task RunAsync() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey) - .Build(); - - var skAgent = new SemanticKernelAgent( - kernel: kernel, - name: "assistant", - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage. - .RegisterPrintMessage(); // pretty print the message to the console - - await skAgent.SendAsync("Hey tell me a long tedious joke"); - } -} diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs deleted file mode 100644 index 777e9ff33e..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Create_Semantic_Kernel_Chat_Agent.cs - -#region Using -using AutoGen.Core; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -#endregion Using -namespace AutoGen.SemanticKernel.Sample; - -public class Create_Semantic_Kernel_Chat_Agent -{ - public static async Task RunAsync() - { - #region Create_Kernel - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey) - .Build(); - #endregion Create_Kernel - - #region Create_ChatCompletionAgent - // The built-in ChatCompletionAgent from semantic kernel. - var chatAgent = new ChatCompletionAgent() - { - Kernel = kernel, - Name = "assistant", - Description = "You are a helpful AI assistant", - }; - #endregion Create_ChatCompletionAgent - - #region Create_SemanticKernelChatCompletionAgent - var messageConnector = new SemanticKernelChatMessageContentConnector(); - var skAgent = new SemanticKernelChatCompletionAgent(chatAgent) - .RegisterMiddleware(messageConnector) // register message connector so it support AutoGen built-in message types like TextMessage. 
- .RegisterPrintMessage(); // pretty print the message to the console - #endregion Create_SemanticKernelChatCompletionAgent - - #region Send_Message - await skAgent.SendAsync("Hey tell me a long tedious joke"); - #endregion Send_Message - } -} diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Program.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Program.cs deleted file mode 100644 index 6e9389313b..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/Program.cs +++ /dev/null @@ -1,12 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -using AutoGen.SemanticKernel.Sample; - -await Use_Kernel_Functions_With_Other_Agent.RunAsync(); diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Bing_Search_With_Semantic_Kernel_Agent.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Bing_Search_With_Semantic_Kernel_Agent.cs deleted file mode 100644 index 28400e9382..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Bing_Search_With_Semantic_Kernel_Agent.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Use_Bing_Search_With_Semantic_Kernel_Agent.cs - -using AutoGen.Core; -using AutoGen.SemanticKernel.Extension; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; - -namespace AutoGen.SemanticKernel.Sample; - -public class Use_Bing_Search_With_Semantic_Kernel_Agent -{ - public static async Task RunAsync() - { - var bingApiKey = Environment.GetEnvironmentVariable("BING_API_KEY") ?? throw new Exception("BING_API_KEY environment variable is not set"); - var bingSearch = new BingConnector(bingApiKey); - var webSearchPlugin = new WebSearchEnginePlugin(bingSearch); - - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var kernelBuilder = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey); - kernelBuilder.Plugins.AddFromObject(webSearchPlugin); - - var kernel = kernelBuilder.Build(); - - var skAgent = new SemanticKernelAgent( - kernel: kernel, - name: "assistant", - systemMessage: "You are a helpful AI assistant") - .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage. 
- .RegisterPrintMessage(); // pretty print the message to the console - - await skAgent.SendAsync("Tell me more about gpt-4-o"); - } -} diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs deleted file mode 100644 index d4f5d01321..0000000000 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs +++ /dev/null @@ -1,58 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Use_Kernel_Functions_With_Other_Agent.cs - -#region Using -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel; -#endregion Using - -namespace AutoGen.SemanticKernel.Sample; - -public class Use_Kernel_Functions_With_Other_Agent -{ - public static async Task RunAsync() - { - #region Create_plugin - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - var kernelBuilder = Kernel.CreateBuilder(); - var kernel = kernelBuilder.Build(); - var getWeatherFunction = KernelFunctionFactory.CreateFromMethod( - method: (string location) => $"The weather in {location} is 75 degrees Fahrenheit.", - functionName: "GetWeather", - description: "Get the weather for a location."); - var plugin = kernel.CreatePluginFromFunctions("my_plugin", [getWeatherFunction]); - #endregion Create_plugin - - #region Use_plugin - // Create a middleware to handle the plugin functions - var kernelPluginMiddleware = new KernelPluginMiddleware(kernel, plugin); - - var openAIClient = new OpenAIClient(openAIKey); - var openAIAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: modelId) - .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage. 
- .RegisterMiddleware(kernelPluginMiddleware) // register the middleware to handle the plugin functions - .RegisterPrintMessage(); // pretty print the message to the console - #endregion Use_plugin - - #region Send_message - var toolAggregateMessage = await openAIAgent.SendAsync("Tell me the weather in Seattle"); - - // The aggregate message will be converted to [ToolCallMessage, ToolCallResultMessage] when flowing into the agent - // send the aggregated message to llm to generate the final response - var finalReply = await openAIAgent.SendAsync(toolAggregateMessage); - #endregion Send_message - } -} diff --git a/dotnet/sample/AutoGen.WebAPI.Sample/AutoGen.WebAPI.Sample.csproj b/dotnet/sample/AutoGen.WebAPI.Sample/AutoGen.WebAPI.Sample.csproj deleted file mode 100644 index 76675ba123..0000000000 --- a/dotnet/sample/AutoGen.WebAPI.Sample/AutoGen.WebAPI.Sample.csproj +++ /dev/null @@ -1,13 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - enable - - - - - - - diff --git a/dotnet/sample/AutoGen.WebAPI.Sample/Program.cs b/dotnet/sample/AutoGen.WebAPI.Sample/Program.cs deleted file mode 100644 index 131bcc15eb..0000000000 --- a/dotnet/sample/AutoGen.WebAPI.Sample/Program.cs +++ /dev/null @@ -1,51 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -using System.Runtime.CompilerServices; -using AutoGen.Core; -using AutoGen.WebAPI; - -var alice = new DummyAgent("alice"); -var bob = new DummyAgent("bob"); - -var builder = WebApplication.CreateBuilder(args); -// Add services to the container. - -// run endpoint at port 5000 -builder.WebHost.UseUrls("http://localhost:5000"); -var app = builder.Build(); - -app.UseAgentAsOpenAIChatCompletionEndpoint(alice); -app.UseAgentAsOpenAIChatCompletionEndpoint(bob); - -app.Run(); - -public class DummyAgent : IStreamingAgent -{ - public DummyAgent(string name = "dummy") - { - Name = name; - } - - public string Name { get; } - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - return new TextMessage(Role.Assistant, $"I am dummy {this.Name}", this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var reply = $"I am dummy {this.Name}"; - foreach (var c in reply) - { - yield return new TextMessageUpdate(Role.Assistant, c.ToString(), this.Name); - }; - } -} diff --git a/dotnet/src/AutoGen.Anthropic/Agent/AnthropicClientAgent.cs b/dotnet/src/AutoGen.Anthropic/Agent/AnthropicClientAgent.cs deleted file mode 100644 index 04e58e84e1..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Agent/AnthropicClientAgent.cs +++ /dev/null @@ -1,126 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicClientAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Anthropic.DTO; -using AutoGen.Core; - -namespace AutoGen.Anthropic; - -public class AnthropicClientAgent : IStreamingAgent -{ - private readonly AnthropicClient _anthropicClient; - public string Name { get; } - private readonly string _modelName; - private readonly string _systemMessage; - private readonly decimal _temperature; - private readonly int _maxTokens; - private readonly Tool[]? _tools; - private readonly ToolChoice? _toolChoice; - - public AnthropicClientAgent( - AnthropicClient anthropicClient, - string name, - string modelName, - string systemMessage = "You are a helpful AI assistant", - decimal temperature = 0.7m, - int maxTokens = 1024, - Tool[]? tools = null, - ToolChoice? toolChoice = null) - { - Name = name; - _anthropicClient = anthropicClient; - _modelName = modelName; - _systemMessage = systemMessage; - _temperature = temperature; - _maxTokens = maxTokens; - _tools = tools; - _toolChoice = toolChoice; - } - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - var response = await _anthropicClient.CreateChatCompletionsAsync(CreateParameters(messages, options, false), cancellationToken); - return new MessageEnvelope(response, from: this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, - GenerateReplyOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var message in _anthropicClient.StreamingChatCompletionsAsync( - CreateParameters(messages, options, true), cancellationToken)) - { - yield return new MessageEnvelope(message, from: this.Name); - } - } - - private ChatCompletionRequest CreateParameters(IEnumerable messages, GenerateReplyOptions? options, bool shouldStream) - { - var chatCompletionRequest = new ChatCompletionRequest() - { - SystemMessage = [new SystemMessage { Text = _systemMessage }], - MaxTokens = options?.MaxToken ?? _maxTokens, - Model = _modelName, - Stream = shouldStream, - Temperature = (decimal?)options?.Temperature ?? _temperature, - Tools = _tools?.ToList(), - ToolChoice = _toolChoice ?? (_tools is { Length: > 0 } ? ToolChoice.Auto : null), - StopSequences = options?.StopSequence?.ToArray(), - }; - - chatCompletionRequest.Messages = BuildMessages(messages); - - return chatCompletionRequest; - } - - private List BuildMessages(IEnumerable messages) - { - List chatMessages = new(); - foreach (IMessage? message in messages) - { - switch (message) - { - case IMessage chatMessage when chatMessage.Content.Role == "system": - throw new InvalidOperationException( - "system message has already been set and only one system message is supported. 
\"system\" role for input messages in the Message"); - - case IMessage chatMessage: - chatMessages.Add(chatMessage.Content); - break; - - default: - throw new ArgumentException($"Unexpected message type: {message?.GetType()}"); - } - } - - // merge messages with the same role - // fixing #2884 - var mergedMessages = chatMessages.Aggregate(new List(), (acc, message) => - { - if (acc.Count > 0 && acc.Last().Role == message.Role) - { - acc.Last().Content.AddRange(message.Content); - } - else - { - acc.Add(message); - } - - return acc; - }); - - return mergedMessages; - } -} diff --git a/dotnet/src/AutoGen.Anthropic/AnthropicClient.cs b/dotnet/src/AutoGen.Anthropic/AnthropicClient.cs deleted file mode 100644 index 6ebc89c9e5..0000000000 --- a/dotnet/src/AutoGen.Anthropic/AnthropicClient.cs +++ /dev/null @@ -1,208 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicClient.cs - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Anthropic.Converters; -using AutoGen.Anthropic.DTO; - -namespace AutoGen.Anthropic; - -public sealed class AnthropicClient : IDisposable -{ - private readonly HttpClient _httpClient; - private readonly string _baseUrl; - - private static readonly JsonSerializerOptions JsonSerializerOptions = new() - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Converters = - { - new ContentBaseConverter(), - new JsonPropertyNameEnumConverter(), - new JsonPropertyNameEnumConverter(), - new SystemMessageConverter(), - } - }; - - public AnthropicClient(HttpClient httpClient, string baseUrl, string apiKey) - { - _httpClient = httpClient; - _baseUrl = baseUrl; - - _httpClient.DefaultRequestHeaders.Add("x-api-key", apiKey); - _httpClient.DefaultRequestHeaders.Add("anthropic-version", "2023-06-01"); - } - - public async Task CreateChatCompletionsAsync(ChatCompletionRequest chatCompletionRequest, - CancellationToken cancellationToken) - { - var httpResponseMessage = await SendRequestAsync(chatCompletionRequest, cancellationToken); - var responseStream = await httpResponseMessage.Content.ReadAsStreamAsync(); - - if (httpResponseMessage.IsSuccessStatusCode) - { - return await DeserializeResponseAsync(responseStream, cancellationToken); - } - - ErrorResponse res = await DeserializeResponseAsync(responseStream, cancellationToken); - throw new Exception(res.Error?.Message); - } - - public async IAsyncEnumerable StreamingChatCompletionsAsync( - ChatCompletionRequest chatCompletionRequest, [EnumeratorCancellation] CancellationToken cancellationToken) - { - var httpResponseMessage = await SendRequestAsync(chatCompletionRequest, cancellationToken); - using var reader = new StreamReader(await httpResponseMessage.Content.ReadAsStreamAsync()); - - var currentEvent = new SseEvent(); - - while (await reader.ReadLineAsync() is { } line) - { - if (!string.IsNullOrEmpty(line)) - { - if (line.StartsWith("event:")) - { - currentEvent.EventType = 
line.Substring("event:".Length).Trim(); - } - else if (line.StartsWith("data:")) - { - currentEvent.Data = line.Substring("data:".Length).Trim(); - } - } - else // an empty line indicates the end of an event - { - if (currentEvent.EventType == "content_block_start" && !string.IsNullOrEmpty(currentEvent.Data)) - { - var dataBlock = JsonSerializer.Deserialize(currentEvent.Data!); - if (dataBlock != null && dataBlock.ContentBlock?.Type == "tool_use") - { - currentEvent.ContentBlock = dataBlock.ContentBlock; - } - } - - if (currentEvent.EventType is "message_start" or "content_block_delta" or "message_delta" && currentEvent.Data != null) - { - var res = await JsonSerializer.DeserializeAsync( - new MemoryStream(Encoding.UTF8.GetBytes(currentEvent.Data)), - cancellationToken: cancellationToken) ?? throw new Exception("Failed to deserialize response"); - if (res.Delta?.Type == "input_json_delta" && !string.IsNullOrEmpty(res.Delta.PartialJson) && - currentEvent.ContentBlock != null) - { - currentEvent.ContentBlock.AppendDeltaParameters(res.Delta.PartialJson!); - } - else if (res.Delta is { StopReason: "tool_use" } && currentEvent.ContentBlock != null) - { - if (res.Content == null) - { - res.Content = [currentEvent.ContentBlock.CreateToolUseContent()]; - } - else - { - res.Content.Add(currentEvent.ContentBlock.CreateToolUseContent()); - } - - currentEvent = new SseEvent(); - } - - yield return res; - } - else if (currentEvent.EventType == "error" && currentEvent.Data != null) - { - var res = await JsonSerializer.DeserializeAsync( - new MemoryStream(Encoding.UTF8.GetBytes(currentEvent.Data)), cancellationToken: cancellationToken); - - throw new Exception(res?.Error?.Message); - } - - if (currentEvent.ContentBlock == null) - { - currentEvent = new SseEvent(); - } - } - } - } - - private Task SendRequestAsync(T requestObject, CancellationToken cancellationToken) - { - var httpRequestMessage = new HttpRequestMessage(HttpMethod.Post, _baseUrl); - var jsonRequest = JsonSerializer.Serialize(requestObject, JsonSerializerOptions); - httpRequestMessage.Content = new StringContent(jsonRequest, Encoding.UTF8, "application/json"); - httpRequestMessage.Headers.Add("anthropic-beta", "prompt-caching-2024-07-31"); - return _httpClient.SendAsync(httpRequestMessage, cancellationToken); - } - - private async Task DeserializeResponseAsync(Stream responseStream, CancellationToken cancellationToken) - { - return await JsonSerializer.DeserializeAsync(responseStream, JsonSerializerOptions, cancellationToken) - ?? throw new Exception("Failed to deserialize response"); - } - - public void Dispose() - { - _httpClient.Dispose(); - } - - private struct SseEvent - { - public string EventType { get; set; } - public string? Data { get; set; } - public ContentBlock? ContentBlock { get; set; } - - public SseEvent(string eventType, string? data = null, ContentBlock? contentBlock = null) - { - EventType = eventType; - Data = data; - ContentBlock = contentBlock; - } - } - - private class ContentBlock - { - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("id")] - public string? Id { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("input")] - public object? Input { get; set; } - - public string? 
parameters { get; set; } - - public void AppendDeltaParameters(string deltaParams) - { - StringBuilder sb = new StringBuilder(parameters); - sb.Append(deltaParams); - parameters = sb.ToString(); - } - - public ToolUseContent CreateToolUseContent() - { - return new ToolUseContent { Id = Id, Name = Name, Input = parameters }; - } - } - - private class DataBlock - { - [JsonPropertyName("content_block")] - public ContentBlock? ContentBlock { get; set; } - } -} diff --git a/dotnet/src/AutoGen.Anthropic/AutoGen.Anthropic.csproj b/dotnet/src/AutoGen.Anthropic/AutoGen.Anthropic.csproj deleted file mode 100644 index a4fd32e7e3..0000000000 --- a/dotnet/src/AutoGen.Anthropic/AutoGen.Anthropic.csproj +++ /dev/null @@ -1,22 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - AutoGen.Anthropic - - - - - - - AutoGen.Anthropic - - Provide support for consuming Anthropic models in AutoGen - - - - - - - - diff --git a/dotnet/src/AutoGen.Anthropic/Converters/ContentBaseConverter.cs b/dotnet/src/AutoGen.Anthropic/Converters/ContentBaseConverter.cs deleted file mode 100644 index 7ad1c8e95e..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Converters/ContentBaseConverter.cs +++ /dev/null @@ -1,45 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ContentBaseConverter.cs - -using System; -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Anthropic.DTO; -namespace AutoGen.Anthropic.Converters; - -public sealed class ContentBaseConverter : JsonConverter -{ - public override ContentBase Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - using var doc = JsonDocument.ParseValue(ref reader); - if (doc.RootElement.TryGetProperty("type", out JsonElement typeProperty) && !string.IsNullOrEmpty(typeProperty.GetString())) - { - string? type = typeProperty.GetString(); - var text = doc.RootElement.GetRawText(); - switch (type) - { - case "text": - return JsonSerializer.Deserialize(text, options) ?? throw new InvalidOperationException(); - case "image": - return JsonSerializer.Deserialize(text, options) ?? throw new InvalidOperationException(); - case "tool_use": - return JsonSerializer.Deserialize(text, options) ?? throw new InvalidOperationException(); - case "tool_result": - return JsonSerializer.Deserialize(text, options) ?? throw new InvalidOperationException(); - } - } - - throw new JsonException("Unknown content type"); - } - - public override void Write(Utf8JsonWriter writer, ContentBase value, JsonSerializerOptions options) - { - JsonSerializer.Serialize(writer, value, value.GetType(), options); - } -} diff --git a/dotnet/src/AutoGen.Anthropic/Converters/JsonPropertyNameEnumCoverter.cs b/dotnet/src/AutoGen.Anthropic/Converters/JsonPropertyNameEnumCoverter.cs deleted file mode 100644 index bdc8ce58ab..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Converters/JsonPropertyNameEnumCoverter.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// JsonPropertyNameEnumCoverter.cs - -using System; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace AutoGen.Anthropic.Converters; - -internal class JsonPropertyNameEnumConverter : JsonConverter where T : struct, Enum -{ - public override T Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - string value = reader.GetString() ?? throw new JsonException("Value was null."); - - foreach (var field in typeToConvert.GetFields()) - { - var attribute = field.GetCustomAttribute(); - if (attribute?.Name == value) - { - return (T)Enum.Parse(typeToConvert, field.Name); - } - } - - throw new JsonException($"Unable to convert \"{value}\" to enum {typeToConvert}."); - } - - public override void Write(Utf8JsonWriter writer, T value, JsonSerializerOptions options) - { - var field = value.GetType().GetField(value.ToString()); - var attribute = field?.GetCustomAttribute(); - - if (attribute != null) - { - writer.WriteStringValue(attribute.Name); - } - else - { - writer.WriteStringValue(value.ToString()); - } - } -} - diff --git a/dotnet/src/AutoGen.Anthropic/Converters/SystemMessageConverter.cs b/dotnet/src/AutoGen.Anthropic/Converters/SystemMessageConverter.cs deleted file mode 100644 index 1f7a37ba25..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Converters/SystemMessageConverter.cs +++ /dev/null @@ -1,48 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SystemMessageConverter.cs - -using System; -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Anthropic.DTO; - -namespace AutoGen.Anthropic.Converters; - -public class SystemMessageConverter : JsonConverter -{ - public override object Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - if (reader.TokenType == JsonTokenType.String) - { - return reader.GetString() ?? string.Empty; - } - if (reader.TokenType == JsonTokenType.StartArray) - { - return JsonSerializer.Deserialize(ref reader, options) ?? throw new InvalidOperationException(); - } - - throw new JsonException(); - } - - public override void Write(Utf8JsonWriter writer, object value, JsonSerializerOptions options) - { - if (value is string stringValue) - { - writer.WriteStringValue(stringValue); - } - else if (value is SystemMessage[] arrayValue) - { - JsonSerializer.Serialize(writer, arrayValue, options); - } - else - { - throw new JsonException(); - } - } -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionRequest.cs b/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionRequest.cs deleted file mode 100644 index d5b7d304b3..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionRequest.cs +++ /dev/null @@ -1,99 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionRequest.cs -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Anthropic.DTO; - -public class ChatCompletionRequest -{ - [JsonPropertyName("model")] - public string? Model { get; set; } - - [JsonPropertyName("messages")] - public List Messages { get; set; } - - [JsonPropertyName("system")] - public SystemMessage[]? SystemMessage { get; set; } - - [JsonPropertyName("max_tokens")] - public int MaxTokens { get; set; } - - [JsonPropertyName("metadata")] - public object? Metadata { get; set; } - - [JsonPropertyName("stop_sequences")] - public string[]? StopSequences { get; set; } - - [JsonPropertyName("stream")] - public bool? Stream { get; set; } - - [JsonPropertyName("temperature")] - public decimal? Temperature { get; set; } - - [JsonPropertyName("top_k")] - public int? TopK { get; set; } - - [JsonPropertyName("top_p")] - public decimal? TopP { get; set; } - - [JsonPropertyName("tools")] - public List? Tools { get; set; } - - [JsonPropertyName("tool_choice")] - public ToolChoice? ToolChoice { get; set; } - - public ChatCompletionRequest() - { - Messages = new List(); - } -} - -public class SystemMessage -{ - [JsonPropertyName("text")] - public string? Text { get; set; } - - [JsonPropertyName("type")] - public string? Type { get; private set; } = "text"; - - [JsonPropertyName("cache_control")] - public CacheControl? CacheControl { get; set; } - - public static SystemMessage CreateSystemMessage(string systemMessage) => new() { Text = systemMessage }; - - public static SystemMessage CreateSystemMessageWithCacheControl(string systemMessage) => new() - { - Text = systemMessage, - CacheControl = new CacheControl { Type = CacheControlType.Ephemeral } - }; -} - -public class ChatMessage -{ - [JsonPropertyName("role")] - public string Role { get; set; } - - [JsonPropertyName("content")] - public List Content { get; set; } - - public ChatMessage(string role, string content) - { - Role = role; - Content = new List() { new TextContent { Text = content } }; - } - - public ChatMessage(string role, List content) - { - Role = role; - Content = content; - } - - public void AddContent(ContentBase content) => Content.Add(content); -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionResponse.cs b/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionResponse.cs deleted file mode 100644 index afa2244104..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/ChatCompletionResponse.cs +++ /dev/null @@ -1,103 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionResponse.cs - - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Anthropic.DTO; -public class ChatCompletionResponse -{ - [JsonPropertyName("content")] - public List? Content { get; set; } - - [JsonPropertyName("id")] - public string? Id { get; set; } - - [JsonPropertyName("model")] - public string? Model { get; set; } - - [JsonPropertyName("role")] - public string? 
Role { get; set; } - - [JsonPropertyName("stop_reason")] - public string? StopReason { get; set; } - - [JsonPropertyName("stop_sequence")] - public object? StopSequence { get; set; } - - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("usage")] - public Usage? Usage { get; set; } - - [JsonPropertyName("delta")] - public Delta? Delta { get; set; } - - [JsonPropertyName("message")] - public StreamingMessage? streamingMessage { get; set; } -} - -public class StreamingMessage -{ - [JsonPropertyName("id")] - public string? Id { get; set; } - - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("role")] - public string? Role { get; set; } - - [JsonPropertyName("model")] - public string? Model { get; set; } - - [JsonPropertyName("stop_reason")] - public object? StopReason { get; set; } - - [JsonPropertyName("stop_sequence")] - public object? StopSequence { get; set; } - - [JsonPropertyName("usage")] - public Usage? Usage { get; set; } -} - -public class Usage -{ - [JsonPropertyName("input_tokens")] - public int InputTokens { get; set; } - - [JsonPropertyName("output_tokens")] - public int OutputTokens { get; set; } - - [JsonPropertyName("cache_creation_input_tokens")] - public int CacheCreationInputTokens { get; set; } - - [JsonPropertyName("cache_read_input_tokens")] - public int CacheReadInputTokens { get; set; } -} - -public class Delta -{ - [JsonPropertyName("stop_reason")] - public string? StopReason { get; set; } - - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("text")] - public string? Text { get; set; } - - [JsonPropertyName("partial_json")] - public string? PartialJson { get; set; } - - [JsonPropertyName("usage")] - public Usage? Usage { get; set; } -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/Content.cs b/dotnet/src/AutoGen.Anthropic/DTO/Content.cs deleted file mode 100644 index 373cdcee1e..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/Content.cs +++ /dev/null @@ -1,101 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Content.cs - -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using AutoGen.Anthropic.Converters; - -namespace AutoGen.Anthropic.DTO; - -public abstract class ContentBase -{ - [JsonPropertyName("type")] - public abstract string Type { get; } - - [JsonPropertyName("cache_control")] - public CacheControl? CacheControl { get; set; } -} - -public class TextContent : ContentBase -{ - [JsonPropertyName("type")] - public override string Type => "text"; - - [JsonPropertyName("text")] - public string? Text { get; set; } - - public static TextContent CreateTextWithCacheControl(string text) => new() - { - Text = text, - CacheControl = new CacheControl { Type = CacheControlType.Ephemeral } - }; -} - -public class ImageContent : ContentBase -{ - [JsonPropertyName("type")] - public override string Type => "image"; - - [JsonPropertyName("source")] - public ImageSource? Source { get; set; } -} - -public class ImageSource -{ - [JsonPropertyName("type")] - public string Type => "base64"; - - [JsonPropertyName("media_type")] - public string? 
MediaType { get; set; } - - [JsonPropertyName("data")] - public string? Data { get; set; } -} - -public class ToolUseContent : ContentBase -{ - [JsonPropertyName("type")] - public override string Type => "tool_use"; - - [JsonPropertyName("id")] - public string? Id { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("input")] - public JsonNode? Input { get; set; } -} - -public class ToolResultContent : ContentBase -{ - [JsonPropertyName("type")] - public override string Type => "tool_result"; - - [JsonPropertyName("tool_use_id")] - public string? Id { get; set; } - - [JsonPropertyName("content")] - public string? Content { get; set; } -} - -public class CacheControl -{ - [JsonPropertyName("type")] - public CacheControlType Type { get; set; } - - public static CacheControl Create() => new CacheControl { Type = CacheControlType.Ephemeral }; -} - -[JsonConverter(typeof(JsonPropertyNameEnumConverter))] -public enum CacheControlType -{ - [JsonPropertyName("ephemeral")] - Ephemeral -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/ErrorResponse.cs b/dotnet/src/AutoGen.Anthropic/DTO/ErrorResponse.cs deleted file mode 100644 index ae35cab425..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/ErrorResponse.cs +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ErrorResponse.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Anthropic.DTO; - -public sealed class ErrorResponse -{ - [JsonPropertyName("error")] - public Error? Error { get; set; } -} - -public sealed class Error -{ - [JsonPropertyName("Type")] - public string? Type { get; set; } - - [JsonPropertyName("message")] - public string? Message { get; set; } -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/Tool.cs b/dotnet/src/AutoGen.Anthropic/DTO/Tool.cs deleted file mode 100644 index c1d83af72d..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/Tool.cs +++ /dev/null @@ -1,49 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Tool.cs - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Anthropic.DTO; - -public class Tool -{ - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("description")] - public string? Description { get; set; } - - [JsonPropertyName("input_schema")] - public InputSchema? InputSchema { get; set; } - - [JsonPropertyName("cache_control")] - public CacheControl? CacheControl { get; set; } -} - -public class InputSchema -{ - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("properties")] - public Dictionary? Properties { get; set; } - - [JsonPropertyName("required")] - public List? 
Required { get; set; } -} - -public class SchemaProperty -{ - [JsonPropertyName("type")] - public string? Type { get; set; } - - [JsonPropertyName("description")] - public string? Description { get; set; } -} diff --git a/dotnet/src/AutoGen.Anthropic/DTO/ToolChoice.cs b/dotnet/src/AutoGen.Anthropic/DTO/ToolChoice.cs deleted file mode 100644 index 9431de09a4..0000000000 --- a/dotnet/src/AutoGen.Anthropic/DTO/ToolChoice.cs +++ /dev/null @@ -1,45 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ToolChoice.cs - -using System.Text.Json.Serialization; -using AutoGen.Anthropic.Converters; - -namespace AutoGen.Anthropic.DTO; - -[JsonConverter(typeof(JsonPropertyNameEnumConverter))] -public enum ToolChoiceType -{ - [JsonPropertyName("auto")] - Auto, // Default behavior - - [JsonPropertyName("any")] - Any, // Use any provided tool - - [JsonPropertyName("tool")] - Tool // Force a specific tool -} - -public class ToolChoice -{ - [JsonPropertyName("type")] - public ToolChoiceType Type { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } - - private ToolChoice(ToolChoiceType type, string? name = null) - { - Type = type; - Name = name; - } - - public static ToolChoice Auto => new(ToolChoiceType.Auto); - public static ToolChoice Any => new(ToolChoiceType.Any); - public static ToolChoice ToolUse(string name) => new(ToolChoiceType.Tool, name); -} diff --git a/dotnet/src/AutoGen.Anthropic/Extensions/AnthropicAgentExtension.cs b/dotnet/src/AutoGen.Anthropic/Extensions/AnthropicAgentExtension.cs deleted file mode 100644 index 4f351f2096..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Extensions/AnthropicAgentExtension.cs +++ /dev/null @@ -1,40 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicAgentExtension.cs - -using AutoGen.Anthropic.Middleware; -using AutoGen.Core; - -namespace AutoGen.Anthropic.Extensions; - -public static class AnthropicAgentExtension -{ - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this AnthropicClientAgent agent, AnthropicMessageConnector? connector = null) - { - connector ??= new AnthropicMessageConnector(); - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, AnthropicMessageConnector? 
connector = null) - { - connector ??= new AnthropicMessageConnector(); - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.Anthropic/Middleware/AnthropicMessageConnector.cs b/dotnet/src/AutoGen.Anthropic/Middleware/AnthropicMessageConnector.cs deleted file mode 100644 index ef063339fa..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Middleware/AnthropicMessageConnector.cs +++ /dev/null @@ -1,291 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Anthropic.DTO; -using AutoGen.Core; - -namespace AutoGen.Anthropic.Middleware; - -public class AnthropicMessageConnector : IStreamingMiddleware -{ - public string? Name => nameof(AnthropicMessageConnector); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var messages = context.Messages; - var chatMessages = await ProcessMessageAsync(messages, agent); - var response = await agent.GenerateReplyAsync(chatMessages, context.Options, cancellationToken); - - return response is IMessage chatMessage - ? PostProcessMessage(chatMessage.Content, agent) - : response; - } - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var messages = context.Messages; - var chatMessages = await ProcessMessageAsync(messages, agent); - - await foreach (var reply in agent.GenerateStreamingReplyAsync(chatMessages, context.Options, cancellationToken)) - { - if (reply is IMessage chatMessage) - { - var response = ProcessChatCompletionResponse(chatMessage, agent); - if (response is not null) - { - yield return response; - } - } - else - { - yield return reply; - } - } - } - - private IMessage? ProcessChatCompletionResponse(IMessage chatMessage, - IStreamingAgent agent) - { - if (chatMessage.Content.Content is { Count: 1 } && - chatMessage.Content.Content[0] is ToolUseContent toolUseContent) - { - return new ToolCallMessage( - toolUseContent.Name ?? - throw new InvalidOperationException($"Expected {nameof(toolUseContent.Name)} to be specified"), - toolUseContent.Input?.ToString() ?? - throw new InvalidOperationException($"Expected {nameof(toolUseContent.Input)} to be specified"), - from: agent.Name); - } - - var delta = chatMessage.Content.Delta; - return delta != null && !string.IsNullOrEmpty(delta.Text) - ? 
new TextMessageUpdate(role: Role.Assistant, delta.Text, from: agent.Name) - : null; - } - - private async Task> ProcessMessageAsync(IEnumerable messages, IAgent agent) - { - var processedMessages = new List(); - - foreach (var message in messages) - { - var processedMessage = message switch - { - TextMessage textMessage => ProcessTextMessage(textMessage, agent), - - ImageMessage imageMessage => - (MessageEnvelope[])[new MessageEnvelope(new ChatMessage("user", - new ContentBase[] { new ImageContent { Source = await ProcessImageSourceAsync(imageMessage) } } - .ToList()), - from: agent.Name)], - - MultiModalMessage multiModalMessage => await ProcessMultiModalMessageAsync(multiModalMessage, agent), - - ToolCallMessage toolCallMessage => ProcessToolCallMessage(toolCallMessage, agent), - ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage), - AggregateMessage toolCallAggregateMessage => ProcessToolCallAggregateMessage(toolCallAggregateMessage, agent), - _ => [message], - }; - - processedMessages.AddRange(processedMessage); - } - - return processedMessages; - } - - private IMessage PostProcessMessage(ChatCompletionResponse response, IAgent from) - { - if (response.Content is null) - { - throw new ArgumentNullException(nameof(response.Content)); - } - - // When expecting a tool call, sometimes the response will contain two messages, one chat and one tool. - // The first message is typically a TextContent, of the LLM explaining what it is trying to do. - // The second message contains the tool call. - if (response.Content.Count > 1) - { - if (response.Content.Count == 2 && response.Content[0] is TextContent && - response.Content[1] is ToolUseContent toolUseContent) - { - return new ToolCallMessage(toolUseContent.Name ?? string.Empty, - toolUseContent.Input?.ToJsonString() ?? string.Empty, - from: from.Name); - } - - throw new NotSupportedException($"Expected {nameof(response.Content)} to have one output"); - } - - var content = response.Content[0]; - switch (content) - { - case TextContent textContent: - return new TextMessage(Role.Assistant, textContent.Text ?? string.Empty, from: from.Name); - - case ToolUseContent toolUseContent: - return new ToolCallMessage(toolUseContent.Name ?? string.Empty, - toolUseContent.Input?.ToJsonString() ?? string.Empty, - from: from.Name); - - case ImageContent: - throw new InvalidOperationException( - "Claude is an image understanding model only. 
It can interpret and analyze images, but it cannot generate, produce, edit, manipulate or create images"); - default: - throw new ArgumentOutOfRangeException(nameof(content)); - } - } - - private IEnumerable> ProcessTextMessage(TextMessage textMessage, IAgent agent) - { - ChatMessage messages; - - if (textMessage.From == agent.Name) - { - messages = new ChatMessage( - "assistant", textMessage.Content); - } - else if (textMessage.From is null) - { - if (textMessage.Role == Role.User) - { - messages = new ChatMessage( - "user", textMessage.Content); - } - else if (textMessage.Role == Role.Assistant) - { - messages = new ChatMessage( - "assistant", textMessage.Content); - } - else if (textMessage.Role == Role.System) - { - messages = new ChatMessage( - "system", textMessage.Content); - } - else - { - throw new NotSupportedException($"Role {textMessage.Role} is not supported"); - } - } - else - { - // if from is not null, then the message is from user - messages = new ChatMessage( - "user", textMessage.Content); - } - - return [new MessageEnvelope(messages, from: textMessage.From)]; - } - - private async Task> ProcessMultiModalMessageAsync(MultiModalMessage multiModalMessage, IAgent agent) - { - var content = new List(); - foreach (var message in multiModalMessage.Content) - { - switch (message) - { - case TextMessage textMessage when textMessage.GetContent() is not null: - content.Add(new TextContent { Text = textMessage.GetContent() }); - break; - case ImageMessage imageMessage: - content.Add(new ImageContent() { Source = await ProcessImageSourceAsync(imageMessage) }); - break; - } - } - - return [MessageEnvelope.Create(new ChatMessage("user", content), agent.Name)]; - } - - private async Task ProcessImageSourceAsync(ImageMessage imageMessage) - { - if (imageMessage.Data != null) - { - return new ImageSource - { - MediaType = imageMessage.Data.MediaType, - Data = Convert.ToBase64String(imageMessage.Data.ToArray()) - }; - } - - if (imageMessage.Url is null) - { - throw new InvalidOperationException("Invalid ImageMessage, the data or url must be provided"); - } - - var uri = new Uri(imageMessage.Url); - using var client = new HttpClient(); - var response = client.GetAsync(uri).Result; - if (!response.IsSuccessStatusCode) - { - throw new HttpRequestException($"Failed to download the image from {uri}"); - } - - return new ImageSource - { - MediaType = "image/jpeg", - Data = Convert.ToBase64String(await response.Content.ReadAsByteArrayAsync()) - }; - } - - private IEnumerable ProcessToolCallMessage(ToolCallMessage toolCallMessage, IAgent agent) - { - var chatMessage = new ChatMessage("assistant", new List()); - foreach (var toolCall in toolCallMessage.ToolCalls) - { - chatMessage.AddContent(new ToolUseContent - { - Id = toolCall.ToolCallId, - Name = toolCall.FunctionName, - Input = JsonNode.Parse(toolCall.FunctionArguments) - }); - } - - return [MessageEnvelope.Create(chatMessage, toolCallMessage.From)]; - } - - private IEnumerable ProcessToolCallResultMessage(ToolCallResultMessage toolCallResultMessage) - { - var chatMessage = new ChatMessage("user", new List()); - foreach (var toolCall in toolCallResultMessage.ToolCalls) - { - chatMessage.AddContent(new ToolResultContent - { - Id = toolCall.ToolCallId ?? 
string.Empty, - Content = toolCall.Result, - }); - } - - return [MessageEnvelope.Create(chatMessage, toolCallResultMessage.From)]; - } - - private IEnumerable ProcessToolCallAggregateMessage(AggregateMessage aggregateMessage, IAgent agent) - { - if (aggregateMessage.From is { } from && from != agent.Name) - { - var contents = aggregateMessage.Message2.ToolCalls.Select(t => t.Result); - var messages = contents.Select(c => - new ChatMessage("assistant", c ?? throw new ArgumentNullException(nameof(c)))); - - return messages.Select(m => new MessageEnvelope(m, from: from)); - } - - var toolCallMessage = ProcessToolCallMessage(aggregateMessage.Message1, agent); - var toolCallResult = ProcessToolCallResultMessage(aggregateMessage.Message2); - - return toolCallMessage.Concat(toolCallResult); - } -} diff --git a/dotnet/src/AutoGen.Anthropic/Utils/AnthropicConstants.cs b/dotnet/src/AutoGen.Anthropic/Utils/AnthropicConstants.cs deleted file mode 100644 index cffab557a2..0000000000 --- a/dotnet/src/AutoGen.Anthropic/Utils/AnthropicConstants.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicConstants.cs - -namespace AutoGen.Anthropic.Utils; - -public static class AnthropicConstants -{ - public static string Endpoint = "https://api.anthropic.com/v1/messages"; - - // Models - public static string Claude3Opus = "claude-3-opus-20240229"; - public static string Claude3Sonnet = "claude-3-sonnet-20240229"; - public static string Claude3Haiku = "claude-3-haiku-20240307"; - public static string Claude35Sonnet = "claude-3-5-sonnet-20240620"; -} diff --git a/dotnet/src/AutoGen.AzureAIInference/Agent/ChatCompletionsClientAgent.cs b/dotnet/src/AutoGen.AzureAIInference/Agent/ChatCompletionsClientAgent.cs deleted file mode 100644 index 746807538d..0000000000 --- a/dotnet/src/AutoGen.AzureAIInference/Agent/ChatCompletionsClientAgent.cs +++ /dev/null @@ -1,208 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionsClientAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.AzureAIInference.Extension; -using AutoGen.Core; -using Azure.AI.Inference; - -namespace AutoGen.AzureAIInference; - -/// -/// ChatCompletions client agent. This agent is a thin wrapper around to provide a simple interface for chat completions. -/// supports the following message types: -/// -/// -/// where T is : chat request message. -/// -/// -/// returns the following message types: -/// -/// -/// where T is : chat response message. -/// where T is : streaming chat completions update. 
-/// -/// -/// -public class ChatCompletionsClientAgent : IStreamingAgent -{ - private readonly ChatCompletionsClient chatCompletionsClient; - private readonly ChatCompletionsOptions options; - private readonly string systemMessage; - - /// - /// Create a new instance of . - /// - /// chat completions client - /// agent name - /// model name. e.g. gpt-turbo-3.5 - /// system message - /// temperature - /// max tokens to generated - /// response format, set it to to enable json mode. - /// seed to use, set it to enable deterministic output - /// functions - public ChatCompletionsClientAgent( - ChatCompletionsClient chatCompletionsClient, - string name, - string modelName, - string systemMessage = "You are a helpful AI assistant", - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? functions = null) - : this( - chatCompletionsClient: chatCompletionsClient, - name: name, - options: CreateChatCompletionOptions(modelName, temperature, maxTokens, seed, responseFormat, functions), - systemMessage: systemMessage) - { - } - - /// - /// Create a new instance of . - /// - /// chat completions client - /// agent name - /// system message - /// chat completion option. The option can't contain messages - public ChatCompletionsClientAgent( - ChatCompletionsClient chatCompletionsClient, - string name, - ChatCompletionsOptions options, - string systemMessage = "You are a helpful AI assistant") - { - if (options.Messages is { Count: > 0 }) - { - throw new ArgumentException("Messages should not be provided in options"); - } - - this.chatCompletionsClient = chatCompletionsClient; - this.Name = name; - this.options = options; - this.systemMessage = systemMessage; - } - - public string Name { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - var settings = this.CreateChatCompletionsOptions(options, messages); - var reply = await this.chatCompletionsClient.CompleteAsync(settings, cancellationToken: cancellationToken); - - return new MessageEnvelope(reply, from: this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var settings = this.CreateChatCompletionsOptions(options, messages); - var response = await this.chatCompletionsClient.CompleteStreamingAsync(settings, cancellationToken); - await foreach (var update in response.WithCancellation(cancellationToken)) - { - yield return new MessageEnvelope(update, from: this.Name); - } - } - - private ChatCompletionsOptions CreateChatCompletionsOptions(GenerateReplyOptions? options, IEnumerable messages) - { - var oaiMessages = messages.Select(m => m switch - { - IMessage chatRequestMessage => chatRequestMessage.Content, - _ => throw new ArgumentException("Invalid message type") - }); - - // add system message if there's no system message in messages - if (!oaiMessages.Any(m => m is ChatRequestSystemMessage)) - { - oaiMessages = new[] { new ChatRequestSystemMessage(systemMessage) }.Concat(oaiMessages); - } - - // clone the options by serializing and deserializing - var json = JsonSerializer.Serialize(this.options); - var settings = JsonSerializer.Deserialize(json) ?? 
throw new InvalidOperationException("Failed to clone options"); - - foreach (var m in oaiMessages) - { - settings.Messages.Add(m); - } - - settings.Temperature = options?.Temperature ?? settings.Temperature; - settings.MaxTokens = options?.MaxToken ?? settings.MaxTokens; - - foreach (var functions in this.options.Tools) - { - settings.Tools.Add(functions); - } - - foreach (var stopSequence in this.options.StopSequences) - { - settings.StopSequences.Add(stopSequence); - } - - var openAIFunctionDefinitions = options?.Functions?.Select(f => f.ToAzureAIInferenceFunctionDefinition()).ToList(); - if (openAIFunctionDefinitions is { Count: > 0 }) - { - foreach (var f in openAIFunctionDefinitions) - { - settings.Tools.Add(new ChatCompletionsFunctionToolDefinition(f)); - } - } - - if (options?.StopSequence is var sequence && sequence is { Length: > 0 }) - { - foreach (var seq in sequence) - { - settings.StopSequences.Add(seq); - } - } - - return settings; - } - - private static ChatCompletionsOptions CreateChatCompletionOptions( - string modelName, - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? functions = null) - { - var options = new ChatCompletionsOptions() - { - Model = modelName, - Temperature = temperature, - MaxTokens = maxTokens, - Seed = seed, - ResponseFormat = responseFormat, - }; - - if (functions is not null) - { - foreach (var f in functions) - { - options.Tools.Add(new ChatCompletionsFunctionToolDefinition(f)); - } - } - - return options; - } -} diff --git a/dotnet/src/AutoGen.AzureAIInference/AutoGen.AzureAIInference.csproj b/dotnet/src/AutoGen.AzureAIInference/AutoGen.AzureAIInference.csproj deleted file mode 100644 index e9401bc4bc..0000000000 --- a/dotnet/src/AutoGen.AzureAIInference/AutoGen.AzureAIInference.csproj +++ /dev/null @@ -1,25 +0,0 @@ -ο»Ώ - - $(PackageTargetFrameworks) - AutoGen.AzureAIInference - - - - - - - AutoGen.AzureAIInference - - Azure AI Inference Intergration for AutoGen. - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen.AzureAIInference/Extension/ChatComptionClientAgentExtension.cs b/dotnet/src/AutoGen.AzureAIInference/Extension/ChatComptionClientAgentExtension.cs deleted file mode 100644 index f254a13856..0000000000 --- a/dotnet/src/AutoGen.AzureAIInference/Extension/ChatComptionClientAgentExtension.cs +++ /dev/null @@ -1,45 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatComptionClientAgentExtension.cs - -using AutoGen.Core; - -namespace AutoGen.AzureAIInference.Extension; - -public static class ChatComptionClientAgentExtension -{ - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this ChatCompletionsClientAgent agent, AzureAIInferenceChatRequestMessageConnector? connector = null) - { - if (connector == null) - { - connector = new AzureAIInferenceChatRequestMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. 
If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, AzureAIInferenceChatRequestMessageConnector? connector = null) - { - if (connector == null) - { - connector = new AzureAIInferenceChatRequestMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.AzureAIInference/Extension/FunctionContractExtension.cs b/dotnet/src/AutoGen.AzureAIInference/Extension/FunctionContractExtension.cs deleted file mode 100644 index 8bf0aba747..0000000000 --- a/dotnet/src/AutoGen.AzureAIInference/Extension/FunctionContractExtension.cs +++ /dev/null @@ -1,70 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionContractExtension.cs - -using System; -using System.Collections.Generic; -using AutoGen.Core; -using Azure.AI.Inference; -using Json.Schema; -using Json.Schema.Generation; - -namespace AutoGen.AzureAIInference.Extension; - -public static class FunctionContractExtension -{ - /// - /// Convert a to a that can be used in gpt funciton call. - /// - /// function contract - /// - public static FunctionDefinition ToAzureAIInferenceFunctionDefinition(this FunctionContract functionContract) - { - var functionDefinition = new FunctionDefinition - { - Name = functionContract.Name, - Description = functionContract.Description, - }; - var requiredParameterNames = new List(); - var propertiesSchemas = new Dictionary(); - var propertySchemaBuilder = new JsonSchemaBuilder().Type(SchemaValueType.Object); - foreach (var param in functionContract.Parameters ?? []) - { - if (param.Name is null) - { - throw new InvalidOperationException("Parameter name cannot be null"); - } - - var schemaBuilder = new JsonSchemaBuilder().FromType(param.ParameterType ?? 
throw new ArgumentNullException(nameof(param.ParameterType))); - if (param.Description != null) - { - schemaBuilder = schemaBuilder.Description(param.Description); - } - - if (param.IsRequired) - { - requiredParameterNames.Add(param.Name); - } - - var schema = schemaBuilder.Build(); - propertiesSchemas[param.Name] = schema; - - } - propertySchemaBuilder = propertySchemaBuilder.Properties(propertiesSchemas); - propertySchemaBuilder = propertySchemaBuilder.Required(requiredParameterNames); - - var option = new System.Text.Json.JsonSerializerOptions() - { - PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase - }; - - functionDefinition.Parameters = BinaryData.FromObjectAsJson(propertySchemaBuilder.Build(), option); - - return functionDefinition; - } -} diff --git a/dotnet/src/AutoGen.AzureAIInference/Middleware/AzureAIInferenceChatRequestMessageConnector.cs b/dotnet/src/AutoGen.AzureAIInference/Middleware/AzureAIInferenceChatRequestMessageConnector.cs deleted file mode 100644 index f411f689aa..0000000000 --- a/dotnet/src/AutoGen.AzureAIInference/Middleware/AzureAIInferenceChatRequestMessageConnector.cs +++ /dev/null @@ -1,308 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AzureAIInferenceChatRequestMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; -using Azure.AI.Inference; - -namespace AutoGen.AzureAIInference; - -/// -/// This middleware converts the incoming to where T is before sending to agent. And converts the output to after receiving from agent. -/// Supported are -/// - -/// - -/// - -/// - -/// - -/// - where T is -/// - where TMessage1 is and TMessage2 is -/// -public class AzureAIInferenceChatRequestMessageConnector : IStreamingMiddleware -{ - private bool strictMode = false; - - /// - /// Create a new instance of . - /// - /// If true, will throw an - /// When the message type is not supported. If false, it will ignore the unsupported message type. - public AzureAIInferenceChatRequestMessageConnector(bool strictMode = false) - { - this.strictMode = strictMode; - } - - public string? Name => nameof(AzureAIInferenceChatRequestMessageConnector); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var chatMessages = ProcessIncomingMessages(agent, context.Messages); - - var reply = await agent.GenerateReplyAsync(chatMessages, context.Options, cancellationToken); - - return PostProcessMessage(reply); - } - - public async IAsyncEnumerable InvokeAsync( - MiddlewareContext context, - IStreamingAgent agent, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var chatMessages = ProcessIncomingMessages(agent, context.Messages); - var streamingReply = agent.GenerateStreamingReplyAsync(chatMessages, context.Options, cancellationToken); - string? 
currentToolName = null; - await foreach (var reply in streamingReply) - { - if (reply is IMessage update) - { - if (update.Content.FunctionName is string functionName) - { - currentToolName = functionName; - } - else if (update.Content.ToolCallUpdate is StreamingFunctionToolCallUpdate toolCallUpdate && toolCallUpdate.Name is string toolCallName) - { - currentToolName = toolCallName; - } - var postProcessMessage = PostProcessStreamingMessage(update, currentToolName); - if (postProcessMessage != null) - { - yield return postProcessMessage; - } - } - else - { - if (this.strictMode) - { - throw new InvalidOperationException($"Invalid streaming message type {reply.GetType().Name}"); - } - else - { - yield return reply; - } - } - } - } - - public IMessage PostProcessMessage(IMessage message) - { - return message switch - { - IMessage m => PostProcessChatResponseMessage(m.Content, m.From), - IMessage m => PostProcessChatCompletions(m), - _ when strictMode is false => message, - _ => throw new InvalidOperationException($"Invalid return message type {message.GetType().Name}"), - }; - } - - public IMessage? PostProcessStreamingMessage(IMessage update, string? currentToolName) - { - if (update.Content.ContentUpdate is string contentUpdate && string.IsNullOrEmpty(contentUpdate) == false) - { - // text message - return new TextMessageUpdate(Role.Assistant, contentUpdate, from: update.From); - } - else if (update.Content.FunctionName is string functionName) - { - return new ToolCallMessageUpdate(functionName, string.Empty, from: update.From); - } - else if (update.Content.FunctionArgumentsUpdate is string functionArgumentsUpdate && currentToolName is string) - { - return new ToolCallMessageUpdate(currentToolName, functionArgumentsUpdate, from: update.From); - } - else if (update.Content.ToolCallUpdate is StreamingFunctionToolCallUpdate tooCallUpdate && currentToolName is string) - { - return new ToolCallMessageUpdate(tooCallUpdate.Name ?? currentToolName, tooCallUpdate.ArgumentsUpdate, from: update.From); - } - else - { - return null; - } - } - - private IMessage PostProcessChatCompletions(IMessage message) - { - // throw exception if prompt filter results is not null - if (message.Content.Choices[0].FinishReason == CompletionsFinishReason.ContentFiltered) - { - throw new InvalidOperationException("The content is filtered because its potential risk. Please try another input."); - } - - return PostProcessChatResponseMessage(message.Content.Choices[0].Message, message.From); - } - - private IMessage PostProcessChatResponseMessage(ChatResponseMessage chatResponseMessage, string? 
from) - { - var textContent = chatResponseMessage.Content; - if (chatResponseMessage.ToolCalls.Where(tc => tc is ChatCompletionsFunctionToolCall).Any()) - { - var functionToolCalls = chatResponseMessage.ToolCalls - .Where(tc => tc is ChatCompletionsFunctionToolCall) - .Select(tc => (ChatCompletionsFunctionToolCall)tc); - - var toolCalls = functionToolCalls.Select(tc => new ToolCall(tc.Name, tc.Arguments) { ToolCallId = tc.Id }); - - return new ToolCallMessage(toolCalls, from) - { - Content = textContent, - }; - } - - if (textContent is string content && !string.IsNullOrEmpty(content)) - { - return new TextMessage(Role.Assistant, content, from); - } - - throw new InvalidOperationException("Invalid ChatResponseMessage"); - } - - public IEnumerable ProcessIncomingMessages(IAgent agent, IEnumerable messages) - { - return messages.SelectMany(m => - { - if (m is IMessage crm) - { - return [crm]; - } - else - { - var chatRequestMessages = m switch - { - TextMessage textMessage => ProcessTextMessage(agent, textMessage), - ImageMessage imageMessage when (imageMessage.From is null || imageMessage.From != agent.Name) => ProcessImageMessage(agent, imageMessage), - MultiModalMessage multiModalMessage when (multiModalMessage.From is null || multiModalMessage.From != agent.Name) => ProcessMultiModalMessage(agent, multiModalMessage), - ToolCallMessage toolCallMessage when (toolCallMessage.From is null || toolCallMessage.From == agent.Name) => ProcessToolCallMessage(agent, toolCallMessage), - ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage), - AggregateMessage aggregateMessage => ProcessFunctionCallMiddlewareMessage(agent, aggregateMessage), - _ when strictMode is false => [], - _ => throw new InvalidOperationException($"Invalid message type: {m.GetType().Name}"), - }; - - if (chatRequestMessages.Any()) - { - return chatRequestMessages.Select(cm => MessageEnvelope.Create(cm, m.From)); - } - else - { - return [m]; - } - } - }); - } - - private IEnumerable ProcessTextMessage(IAgent agent, TextMessage message) - { - if (message.Role == Role.System) - { - return [new ChatRequestSystemMessage(message.Content)]; - } - - if (agent.Name == message.From) - { - return [new ChatRequestAssistantMessage { Content = message.Content }]; - } - else - { - return message.From switch - { - null when message.Role == Role.User => [new ChatRequestUserMessage(message.Content)], - null when message.Role == Role.Assistant => [new ChatRequestAssistantMessage() { Content = message.Content }], - null => throw new InvalidOperationException("Invalid Role"), - _ => [new ChatRequestUserMessage(message.Content)] - }; - } - } - - private IEnumerable ProcessImageMessage(IAgent agent, ImageMessage message) - { - if (agent.Name == message.From) - { - // image message from assistant is not supported - throw new ArgumentException("ImageMessage is not supported when message.From is the same with agent"); - } - - var imageContentItem = this.CreateChatMessageImageContentItemFromImageMessage(message); - return [new ChatRequestUserMessage([imageContentItem])]; - } - - private IEnumerable ProcessMultiModalMessage(IAgent agent, MultiModalMessage message) - { - if (agent.Name == message.From) - { - // image message from assistant is not supported - throw new ArgumentException("MultiModalMessage is not supported when message.From is the same with agent"); - } - - IEnumerable items = message.Content.Select(ci => ci switch - { - TextMessage text => new ChatMessageTextContentItem(text.Content), - 
ImageMessage image => this.CreateChatMessageImageContentItemFromImageMessage(image), - _ => throw new NotImplementedException(), - }); - - return [new ChatRequestUserMessage(items)]; - } - - private ChatMessageImageContentItem CreateChatMessageImageContentItemFromImageMessage(ImageMessage message) - { - return message.Data is null && message.Url is not null - ? new ChatMessageImageContentItem(new Uri(message.Url)) - : new ChatMessageImageContentItem(message.Data, message.Data?.MediaType); - } - - private IEnumerable ProcessToolCallMessage(IAgent agent, ToolCallMessage message) - { - if (message.From is not null && message.From != agent.Name) - { - throw new ArgumentException("ToolCallMessage is not supported when message.From is not the same with agent"); - } - - var toolCall = message.ToolCalls.Select((tc, i) => new ChatCompletionsFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments)); - var textContent = message.GetContent() ?? string.Empty; - var chatRequestMessage = new ChatRequestAssistantMessage() { Content = textContent }; - foreach (var tc in toolCall) - { - chatRequestMessage.ToolCalls.Add(tc); - } - - return [chatRequestMessage]; - } - - private IEnumerable ProcessToolCallResultMessage(ToolCallResultMessage message) - { - return message.ToolCalls - .Where(tc => tc.Result is not null) - .Select((tc, i) => new ChatRequestToolMessage(tc.Result, tc.ToolCallId ?? $"{tc.FunctionName}_{i}")); - } - - private IEnumerable ProcessFunctionCallMiddlewareMessage(IAgent agent, AggregateMessage aggregateMessage) - { - if (aggregateMessage.From is not null && aggregateMessage.From != agent.Name) - { - // convert as user message - var resultMessage = aggregateMessage.Message2; - - return resultMessage.ToolCalls.Select(tc => new ChatRequestUserMessage(tc.Result)); - } - else - { - var toolCallMessage1 = aggregateMessage.Message1; - var toolCallResultMessage = aggregateMessage.Message2; - - var assistantMessage = this.ProcessToolCallMessage(agent, toolCallMessage1); - var toolCallResults = this.ProcessToolCallResultMessage(toolCallResultMessage); - - return assistantMessage.Concat(toolCallResults); - } - } -} diff --git a/dotnet/src/AutoGen.Core/Agent/DefaultReplyAgent.cs b/dotnet/src/AutoGen.Core/Agent/DefaultReplyAgent.cs deleted file mode 100644 index 3c8026772e..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/DefaultReplyAgent.cs +++ /dev/null @@ -1,37 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DefaultReplyAgent.cs - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class DefaultReplyAgent : IAgent -{ - public DefaultReplyAgent( - string name, - string? defaultReply) - { - Name = name; - DefaultReply = defaultReply ?? string.Empty; - } - - public string Name { get; } - - public string DefaultReply { get; } = string.Empty; - - public async Task GenerateReplyAsync( - IEnumerable _, - GenerateReplyOptions? 
__ = null, - CancellationToken ___ = default) - { - return new TextMessage(Role.Assistant, DefaultReply, from: this.Name); - } -} diff --git a/dotnet/src/AutoGen.Core/Agent/GroupChatManager.cs b/dotnet/src/AutoGen.Core/Agent/GroupChatManager.cs deleted file mode 100644 index c38ecabb66..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/GroupChatManager.cs +++ /dev/null @@ -1,40 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GroupChatManager.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class GroupChatManager : IAgent -{ - public GroupChatManager(IGroupChat groupChat) - { - GroupChat = groupChat; - } - public string Name => throw new ArgumentException("GroupChatManager does not have a name"); - - public IEnumerable? Messages { get; private set; } - - public IGroupChat GroupChat { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options, - CancellationToken cancellationToken = default) - { - var response = await GroupChat.CallAsync(messages, ct: cancellationToken); - Messages = response; - - return response.Last(); - } -} diff --git a/dotnet/src/AutoGen.Core/Agent/IAgent.cs b/dotnet/src/AutoGen.Core/Agent/IAgent.cs deleted file mode 100644 index a5f79d4808..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/IAgent.cs +++ /dev/null @@ -1,60 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IAgent.cs - -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public interface IAgentMetaInformation -{ - public string Name { get; } -} - -public interface IAgent : IAgentMetaInformation -{ - /// - /// Generate reply - /// - /// conversation history - /// completion option. If provided, it should override existing option if there's any - public Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default); -} - -public class GenerateReplyOptions -{ - public GenerateReplyOptions() - { - } - - /// - /// Copy constructor - /// - /// other option to copy from - public GenerateReplyOptions(GenerateReplyOptions other) - { - this.Temperature = other.Temperature; - this.MaxToken = other.MaxToken; - this.StopSequence = other.StopSequence?.Select(s => s)?.ToArray(); - this.Functions = other.Functions?.Select(f => f)?.ToArray(); - } - - public float? Temperature { get; set; } - - public int? MaxToken { get; set; } - - public string[]? StopSequence { get; set; } - - public FunctionContract[]? 
Functions { get; set; } -} diff --git a/dotnet/src/AutoGen.Core/Agent/IMiddlewareAgent.cs b/dotnet/src/AutoGen.Core/Agent/IMiddlewareAgent.cs deleted file mode 100644 index 0e87c2880b..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/IMiddlewareAgent.cs +++ /dev/null @@ -1,60 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IMiddlewareAgent.cs - -using System.Collections.Generic; - -namespace AutoGen.Core; - -public interface IMiddlewareAgent : IAgent -{ - /// - /// Get the inner agent. - /// - IAgent Agent { get; } - - /// - /// Get the middlewares. - /// - IEnumerable Middlewares { get; } - - /// - /// Use middleware. - /// - void Use(IMiddleware middleware); -} - -public interface IMiddlewareStreamAgent : IStreamingAgent -{ - /// - /// Get the inner agent. - /// - IStreamingAgent StreamingAgent { get; } - - IEnumerable StreamingMiddlewares { get; } - - void UseStreaming(IStreamingMiddleware middleware); -} - -public interface IMiddlewareAgent : IMiddlewareAgent - where T : IAgent -{ - /// - /// Get the typed inner agent. - /// - T TAgent { get; } -} - -public interface IMiddlewareStreamAgent : IMiddlewareStreamAgent - where T : IStreamingAgent -{ - /// - /// Get the typed inner agent. - /// - T TStreamingAgent { get; } -} diff --git a/dotnet/src/AutoGen.Core/Agent/IStreamingAgent.cs b/dotnet/src/AutoGen.Core/Agent/IStreamingAgent.cs deleted file mode 100644 index 0f4159f238..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/IStreamingAgent.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IStreamingAgent.cs - -using System.Collections.Generic; -using System.Threading; - -namespace AutoGen.Core; - -/// -/// agent that supports streaming reply -/// -public interface IStreamingAgent : IAgent -{ - public IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/AutoGen.Core/Agent/MiddlewareAgent.cs b/dotnet/src/AutoGen.Core/Agent/MiddlewareAgent.cs deleted file mode 100644 index feff1fc77d..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/MiddlewareAgent.cs +++ /dev/null @@ -1,146 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// MiddlewareAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -/// -/// An agent that allows you to add middleware and modify the behavior of an existing agent. -/// -public class MiddlewareAgent : IMiddlewareAgent -{ - private IAgent _agent; - private readonly List middlewares = new(); - - /// - /// Create a new instance of - /// - /// the inner agent where middleware will be added. - /// the name of the agent if provided. Otherwise, the name of will be used. - public MiddlewareAgent(IAgent innerAgent, string? name = null, IEnumerable? middlewares = null) - { - this.Name = name ?? innerAgent.Name; - this._agent = innerAgent; - if (middlewares != null && middlewares.Any()) - { - foreach (var middleware in middlewares) - { - this.Use(middleware); - } - } - } - - /// - /// Create a new instance of by copying the middlewares from another . - /// - public MiddlewareAgent(MiddlewareAgent other) - { - this.Name = other.Name; - this._agent = other._agent; - this.middlewares.AddRange(other.middlewares); - } - - public string Name { get; } - - /// - /// Get the inner agent. - /// - public IAgent Agent => this._agent; - - /// - /// Get the middlewares. - /// - public IEnumerable Middlewares => this.middlewares; - - public Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - return _agent.GenerateReplyAsync(messages, options, cancellationToken); - } - - /// - /// Add a middleware to the agent. If multiple middlewares are added, they will be executed in the LIFO order. - /// Call into the next function to continue the execution of the next middleware. - /// Short cut middleware execution by not calling into the next function. - /// - public void Use(Func, GenerateReplyOptions?, IAgent, CancellationToken, Task> func, string? middlewareName = null) - { - var middleware = new DelegateMiddleware(middlewareName, async (context, agent, cancellationToken) => - { - return await func(context.Messages, context.Options, agent, cancellationToken); - }); - - this.Use(middleware); - } - - public void Use(IMiddleware middleware) - { - this.middlewares.Add(middleware); - _agent = new DelegateAgent(middleware, _agent); - } - - public override string ToString() - { - var names = this.Middlewares.Select(m => m.Name ?? "[Unknown middleware]"); - var namesPlusAgentName = names.Append(this.Name); - - return namesPlusAgentName.Aggregate((a, b) => $"{a} -> {b}"); - } - - private class DelegateAgent : IAgent - { - private readonly IAgent innerAgent; - private readonly IMiddleware middleware; - - public DelegateAgent(IMiddleware middleware, IAgent innerAgent) - { - this.middleware = middleware; - this.innerAgent = innerAgent; - } - - public string Name { get => this.innerAgent.Name; } - - public Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - var context = new MiddlewareContext(messages, options); - return this.middleware.InvokeAsync(context, this.innerAgent, cancellationToken); - } - } -} - -public sealed class MiddlewareAgent : MiddlewareAgent, IMiddlewareAgent - where T : IAgent -{ - public MiddlewareAgent(T innerAgent, string? 
name = null) - : base(innerAgent, name) - { - this.TAgent = innerAgent; - } - - public MiddlewareAgent(MiddlewareAgent other) - : base(other) - { - this.TAgent = other.TAgent; - } - - /// - /// Get the inner agent of type . - /// - public T TAgent { get; } -} diff --git a/dotnet/src/AutoGen.Core/Agent/MiddlewareStreamingAgent.cs b/dotnet/src/AutoGen.Core/Agent/MiddlewareStreamingAgent.cs deleted file mode 100644 index 652fc38a54..0000000000 --- a/dotnet/src/AutoGen.Core/Agent/MiddlewareStreamingAgent.cs +++ /dev/null @@ -1,124 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MiddlewareStreamingAgent.cs - -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class MiddlewareStreamingAgent : IMiddlewareStreamAgent -{ - private IStreamingAgent _agent; - private readonly List _streamingMiddlewares = new(); - - public MiddlewareStreamingAgent( - IStreamingAgent agent, - string? name = null, - IEnumerable? streamingMiddlewares = null) - { - this.Name = name ?? agent.Name; - _agent = agent; - - if (streamingMiddlewares != null && streamingMiddlewares.Any()) - { - foreach (var middleware in streamingMiddlewares) - { - this.UseStreaming(middleware); - } - } - } - - /// - /// Get the inner agent. - /// - public IStreamingAgent StreamingAgent => _agent; - - /// - /// Get the streaming middlewares. - /// - public IEnumerable StreamingMiddlewares => _streamingMiddlewares; - - public string Name { get; } - - public Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - return _agent.GenerateReplyAsync(messages, options, cancellationToken); - } - - public IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - return _agent.GenerateStreamingReplyAsync(messages, options, cancellationToken); - } - - public void UseStreaming(IStreamingMiddleware middleware) - { - _streamingMiddlewares.Add(middleware); - _agent = new DelegateStreamingAgent(middleware, _agent); - } - - private class DelegateStreamingAgent : IStreamingAgent - { - private IStreamingMiddleware? streamingMiddleware; - private IStreamingAgent innerAgent; - - public string Name => innerAgent.Name; - - public DelegateStreamingAgent(IStreamingMiddleware middleware, IStreamingAgent next) - { - this.streamingMiddleware = middleware; - this.innerAgent = next; - } - - - public Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - if (this.streamingMiddleware is null) - { - return innerAgent.GenerateReplyAsync(messages, options, cancellationToken); - } - - var context = new MiddlewareContext(messages, options); - return this.streamingMiddleware.InvokeAsync(context, (IAgent)innerAgent, cancellationToken); - } - - public IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, GenerateReplyOptions? 
options = null, CancellationToken cancellationToken = default) - { - if (streamingMiddleware is null) - { - return innerAgent.GenerateStreamingReplyAsync(messages, options, cancellationToken); - } - - var context = new MiddlewareContext(messages, options); - return streamingMiddleware.InvokeAsync(context, innerAgent, cancellationToken); - } - } -} - -public sealed class MiddlewareStreamingAgent : MiddlewareStreamingAgent, IMiddlewareStreamAgent - where T : IStreamingAgent -{ - public MiddlewareStreamingAgent(T innerAgent, string? name = null, IEnumerable? streamingMiddlewares = null) - : base(innerAgent, name, streamingMiddlewares) - { - TStreamingAgent = innerAgent; - } - - public MiddlewareStreamingAgent(MiddlewareStreamingAgent other) - : base(other) - { - TStreamingAgent = other.TStreamingAgent; - } - - /// - /// Get the inner agent. - /// - public T TStreamingAgent { get; } -} diff --git a/dotnet/src/AutoGen.Core/AutoGen.Core.csproj b/dotnet/src/AutoGen.Core/AutoGen.Core.csproj deleted file mode 100644 index 8cf9e9183d..0000000000 --- a/dotnet/src/AutoGen.Core/AutoGen.Core.csproj +++ /dev/null @@ -1,26 +0,0 @@ -ο»Ώ - - $(PackageTargetFrameworks) - AutoGen.Core - - - - - - - AutoGen.Core - - Core library for AutoGen. This package provides contracts and core functionalities for AutoGen. - - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen.Core/Extension/AgentExtension.cs b/dotnet/src/AutoGen.Core/Extension/AgentExtension.cs deleted file mode 100644 index ce85e15140..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/AgentExtension.cs +++ /dev/null @@ -1,190 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AgentExtension.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public static class AgentExtension -{ - /// - /// Send message to an agent. - /// - /// message to send. will be added to the end of if provided - /// sender agent. - /// chat history. - /// conversation history - public static async Task SendAsync( - this IAgent agent, - IMessage? message = null, - IEnumerable? chatHistory = null, - CancellationToken ct = default) - { - var messages = new List(); - - if (chatHistory != null) - { - messages.AddRange(chatHistory); - } - - if (message != null) - { - messages.Add(message); - } - - - var result = await agent.GenerateReplyAsync(messages, cancellationToken: ct); - - return result; - } - - /// - /// Send message to an agent. - /// - /// sender agent. - /// message to send. will be added to the end of if provided - /// chat history. - /// conversation history - public static async Task SendAsync( - this IAgent agent, - string message, - IEnumerable? chatHistory = null, - CancellationToken ct = default) - { - var msg = new TextMessage(Role.User, message); - - return await agent.SendAsync(msg, chatHistory, ct); - } - - /// - /// Send message to another agent and iterate over the responses. - /// - /// sender agent. - /// receiver agent. - /// chat history. - /// max conversation round. 
- /// conversation history - public static IAsyncEnumerable SendAsync( - this IAgent agent, - IAgent receiver, - IEnumerable chatHistory, - int maxRound = 10, - CancellationToken ct = default) - { - if (receiver is GroupChatManager manager) - { - var gc = manager.GroupChat; - - return gc.SendAsync(chatHistory, maxRound, ct); - } - - var groupChat = new RoundRobinGroupChat( - agents: - [ - agent, - receiver, - ]); - - return groupChat.SendAsync(chatHistory, maxRound, cancellationToken: ct); - } - - /// - /// Send message to another agent and iterate over the responses. - /// - /// sender agent. - /// message to send. will be added to the end of if provided - /// receiver agent. - /// chat history. - /// max conversation round. - /// conversation history - public static IAsyncEnumerable SendAsync( - this IAgent agent, - IAgent receiver, - string message, - IEnumerable? chatHistory = null, - int maxRound = 10, - CancellationToken ct = default) - { - var msg = new TextMessage(Role.User, message) - { - From = agent.Name, - }; - - chatHistory = chatHistory ?? new List(); - chatHistory = chatHistory.Append(msg); - - return agent.SendAsync(receiver, chatHistory, maxRound, ct); - } - - /// - /// Shortcut API to send message to another agent and get all responses. - /// To iterate over the responses, use or - /// - /// sender agent - /// receiver agent - /// message to send - /// max round - public static async Task> InitiateChatAsync( - this IAgent agent, - IAgent receiver, - string? message = null, - int maxRound = 10, - CancellationToken ct = default) - { - var chatHistory = new List(); - if (message != null) - { - var msg = new TextMessage(Role.User, message) - { - From = agent.Name, - }; - - chatHistory.Add(msg); - } - - await foreach (var msg in agent.SendAsync(receiver, chatHistory, maxRound, ct)) - { - chatHistory.Add(msg); - } - - return chatHistory; - } - - [Obsolete("use GroupChatExtension.SendAsync")] - public static IAsyncEnumerable SendMessageToGroupAsync( - this IAgent agent, - IGroupChat groupChat, - string msg, - IEnumerable? chatHistory = null, - int maxRound = 10, - CancellationToken ct = default) - { - var chatMessage = new TextMessage(Role.Assistant, msg, from: agent.Name); - chatHistory = chatHistory ?? Enumerable.Empty(); - chatHistory = chatHistory.Append(chatMessage); - - return agent.SendMessageToGroupAsync(groupChat, chatHistory, maxRound, ct); - } - - [Obsolete("use GroupChatExtension.SendAsync")] - public static IAsyncEnumerable SendMessageToGroupAsync( - this IAgent _, - IGroupChat groupChat, - IEnumerable? chatHistory = null, - int maxRound = 10, - CancellationToken ct = default) - { - chatHistory = chatHistory ?? Enumerable.Empty(); - return groupChat.SendAsync(chatHistory, maxRound, ct); - } -} diff --git a/dotnet/src/AutoGen.Core/Extension/GroupChatExtension.cs b/dotnet/src/AutoGen.Core/Extension/GroupChatExtension.cs deleted file mode 100644 index e8f78995bb..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/GroupChatExtension.cs +++ /dev/null @@ -1,157 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// GroupChatExtension.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; - -namespace AutoGen.Core; - -public static class GroupChatExtension -{ - public const string TERMINATE = "[GROUPCHAT_TERMINATE]"; - public const string CLEAR_MESSAGES = "[GROUPCHAT_CLEAR_MESSAGES]"; - - [Obsolete("please use SendIntroduction")] - public static void AddInitializeMessage(this IAgent agent, string message, IGroupChat groupChat) - { - var msg = new TextMessage(Role.User, message) - { - From = agent.Name - }; - - groupChat.SendIntroduction(msg); - } - - /// - /// Send messages to a and return new messages from the group chat. - /// - /// - /// - /// - /// - /// - public static async IAsyncEnumerable SendAsync( - this IGroupChat groupChat, - IEnumerable chatHistory, - int maxRound = 10, - [EnumeratorCancellation] - CancellationToken cancellationToken = default) - { - while (maxRound-- > 0) - { - var messages = await groupChat.CallAsync(chatHistory, maxRound: 1, cancellationToken); - - // if no new messages, break the loop - if (messages.Count() == chatHistory.Count()) - { - yield break; - } - - var lastMessage = messages.Last(); - - yield return lastMessage; - if (lastMessage.IsGroupChatTerminateMessage()) - { - yield break; - } - - // messages will contain the complete chat history, include initialize messages - // but we only need to add the last message to the chat history - // fix #3268 - chatHistory = chatHistory.Append(lastMessage); - } - } - - /// - /// Send an instruction message to the group chat. - /// - public static void SendIntroduction(this IAgent agent, string message, IGroupChat groupChat) - { - var msg = new TextMessage(Role.User, message) - { - From = agent.Name - }; - - groupChat.SendIntroduction(msg); - } - - public static IEnumerable MessageToKeep( - this IGroupChat _, - IEnumerable messages) - { - var lastCLRMessageIndex = messages.ToList() - .FindLastIndex(x => x.IsGroupChatClearMessage()); - - // if multiple clr messages, e.g [msg, clr, msg, clr, msg, clr, msg] - // only keep the messages after the second last clr message. - if (messages.Count(m => m.IsGroupChatClearMessage()) > 1) - { - lastCLRMessageIndex = messages.ToList() - .FindLastIndex(lastCLRMessageIndex - 1, lastCLRMessageIndex - 1, x => x.IsGroupChatClearMessage()); - messages = messages.Skip(lastCLRMessageIndex); - } - - lastCLRMessageIndex = messages.ToList() - .FindLastIndex(x => x.IsGroupChatClearMessage()); - - if (lastCLRMessageIndex != -1 && messages.Count() - lastCLRMessageIndex >= 2) - { - messages = messages.Skip(lastCLRMessageIndex); - } - - return messages; - } - - /// - /// Return true if contains , otherwise false. - /// - /// - /// - public static bool IsGroupChatTerminateMessage(this IMessage message) - { - return message.GetContent()?.Contains(TERMINATE) ?? false; - } - - public static bool IsGroupChatClearMessage(this IMessage message) - { - return message.GetContent()?.Contains(CLEAR_MESSAGES) ?? 
false; - } - - [Obsolete] - public static IEnumerable ProcessConversationForAgent( - this IGroupChat groupChat, - IEnumerable initialMessages, - IEnumerable messages) - { - messages = groupChat.MessageToKeep(messages); - return initialMessages.Concat(messages); - } - - internal static IEnumerable ProcessConversationsForRolePlay( - this IGroupChat groupChat, - IEnumerable initialMessages, - IEnumerable messages) - { - messages = groupChat.MessageToKeep(messages); - var messagesToKeep = initialMessages.Concat(messages); - - return messagesToKeep.Select((x, i) => - { - var msg = @$"From {x.From}: -{x.GetContent()} - -round # {i}"; - - return new TextMessage(Role.User, content: msg); - }); - } -} diff --git a/dotnet/src/AutoGen.Core/Extension/MessageExtension.cs b/dotnet/src/AutoGen.Core/Extension/MessageExtension.cs deleted file mode 100644 index 047c8978d8..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/MessageExtension.cs +++ /dev/null @@ -1,229 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MessageExtension.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace AutoGen.Core; - -public static class MessageExtension -{ - private static string separator = new string('-', 20); - - public static string FormatMessage(this IMessage message) - { - return message switch - { -#pragma warning disable CS0618 // deprecated - Message msg => msg.FormatMessage(), -#pragma warning restore CS0618 // deprecated - TextMessage textMessage => textMessage.FormatMessage(), - ImageMessage imageMessage => imageMessage.FormatMessage(), - ToolCallMessage toolCallMessage => toolCallMessage.FormatMessage(), - ToolCallResultMessage toolCallResultMessage => toolCallResultMessage.FormatMessage(), - AggregateMessage aggregateMessage => aggregateMessage.FormatMessage(), - _ => message.ToString(), - } ?? 
string.Empty; - } - - public static string FormatMessage(this TextMessage message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"TextMessage from {message.From}"); - // write a seperator - sb.AppendLine(separator); - sb.AppendLine(message.Content); - // write a seperator - sb.AppendLine(separator); - - return sb.ToString(); - } - - public static string FormatMessage(this ImageMessage message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"ImageMessage from {message.From}"); - // write a seperator - sb.AppendLine(separator); - sb.AppendLine($"Image: {message.Url}"); - // write a seperator - sb.AppendLine(separator); - - return sb.ToString(); - } - - public static string FormatMessage(this ToolCallMessage message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"ToolCallMessage from {message.From}"); - - // write a seperator - sb.AppendLine(separator); - - foreach (var toolCall in message.ToolCalls) - { - sb.AppendLine($"- {toolCall.FunctionName}: {toolCall.FunctionArguments}"); - } - - sb.AppendLine(separator); - - return sb.ToString(); - } - - public static string FormatMessage(this ToolCallResultMessage message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"ToolCallResultMessage from {message.From}"); - - // write a seperator - sb.AppendLine(separator); - - foreach (var toolCall in message.ToolCalls) - { - sb.AppendLine($"- {toolCall.FunctionName}: {toolCall.Result}"); - } - - sb.AppendLine(separator); - - return sb.ToString(); - } - - public static string FormatMessage(this AggregateMessage message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"AggregateMessage from {message.From}"); - - // write a seperator - sb.AppendLine(separator); - - sb.AppendLine("ToolCallMessage:"); - sb.AppendLine(message.Message1.FormatMessage()); - - sb.AppendLine("ToolCallResultMessage:"); - sb.AppendLine(message.Message2.FormatMessage()); - - sb.AppendLine(separator); - - return sb.ToString(); - } - - [Obsolete("This method is deprecated, please use the extension method FormatMessage(this IMessage message) instead.")] - public static string FormatMessage(this Message message) - { - var sb = new StringBuilder(); - // write from - sb.AppendLine($"Message from {message.From}"); - // write a seperator - sb.AppendLine(separator); - - // write content - sb.AppendLine($"content: {message.Content}"); - - // write function name if exists - if (!string.IsNullOrEmpty(message.FunctionName)) - { - sb.AppendLine($"function name: {message.FunctionName}"); - sb.AppendLine($"function arguments: {message.FunctionArguments}"); - } - - // write metadata - if (message.Metadata is { Count: > 0 }) - { - sb.AppendLine($"metadata:"); - foreach (var item in message.Metadata) - { - sb.AppendLine($"{item.Key}: {item.Value}"); - } - } - - // write a seperator - sb.AppendLine(separator); - - return sb.ToString(); - } - - public static bool IsSystemMessage(this IMessage message) - { - return message switch - { - TextMessage textMessage => textMessage.Role == Role.System, -#pragma warning disable CS0618 // deprecated - Message msg => msg.Role == Role.System, -#pragma warning restore CS0618 // deprecated - _ => false, - }; - } - - /// - /// Get the content from the message - /// if the message implements , return the content from the message by calling - /// if the message is a where TMessage1 is and TMessage2 is and the second message only contains one function call, return the result of that function call - /// for all other 
situation, return null. - /// - /// - public static string? GetContent(this IMessage message) - { - return message switch - { - ICanGetTextContent canGetTextContent => canGetTextContent.GetContent(), - AggregateMessage aggregateMessage => string.Join("\n", aggregateMessage.Message2.ToolCalls.Where(x => x.Result is not null).Select(x => x.Result)), -#pragma warning disable CS0618 // deprecated - Message msg => msg.Content, -#pragma warning restore CS0618 // deprecated - _ => null, - }; - } - - /// - /// Get the role from the message if it's available. - /// - public static Role? GetRole(this IMessage message) - { - return message switch - { - TextMessage textMessage => textMessage.Role, -#pragma warning disable CS0618 // deprecated - Message msg => msg.Role, -#pragma warning restore CS0618 // deprecated - ImageMessage img => img.Role, - MultiModalMessage multiModal => multiModal.Role, - _ => null, - }; - } - - /// - /// Return the tool calls from the message if it's available. - /// if the message implements , return the tool calls from the message by calling - /// if the message is a where TMessage1 is and TMessage2 is , return the tool calls from the first message - /// - /// - /// - public static IList? GetToolCalls(this IMessage message) - { - return message switch - { - ICanGetToolCalls canGetToolCalls => canGetToolCalls.GetToolCalls().ToList(), -#pragma warning disable CS0618 // deprecated - Message msg => msg.FunctionName is not null && msg.FunctionArguments is not null - ? msg.Content is not null ? [new ToolCall(msg.FunctionName, msg.FunctionArguments, result: msg.Content)] - : new List { new(msg.FunctionName, msg.FunctionArguments) } - : null, -#pragma warning restore CS0618 // deprecated - AggregateMessage aggregateMessage => aggregateMessage.Message1.ToolCalls, - _ => null, - }; - } -} diff --git a/dotnet/src/AutoGen.Core/Extension/MiddlewareExtension.cs b/dotnet/src/AutoGen.Core/Extension/MiddlewareExtension.cs deleted file mode 100644 index 41a6055717..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/MiddlewareExtension.cs +++ /dev/null @@ -1,151 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MiddlewareExtension.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public static class MiddlewareExtension -{ - /// - /// Register a auto reply hook to an agent. The hook will be called before the agent generate the reply. - /// If the hook return a non-null reply, then that non-null reply will be returned directly without calling the agent. - /// Otherwise, the agent will generate the reply. - /// This is useful when you want to override the agent reply in some cases. - /// - /// - /// - /// - /// throw when agent name is null. 
- [Obsolete("Use RegisterMiddleware instead.")] - public static MiddlewareAgent RegisterReply( - this TAgent agent, - Func, CancellationToken, Task> replyFunc) - where TAgent : IAgent - { - return agent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var reply = await replyFunc(messages, ct); - - if (reply != null) - { - return reply; - } - - return await agent.GenerateReplyAsync(messages, options, ct); - }); - } - - /// - /// Register a post process hook to an agent. The hook will be called before the agent return the reply and after the agent generate the reply. - /// This is useful when you want to customize arbitrary behavior before the agent return the reply. - /// - /// One example is , which print the formatted message to console before the agent return the reply. - /// - /// throw when agent name is null. - [Obsolete("Use RegisterMiddleware instead.")] - public static MiddlewareAgent RegisterPostProcess( - this TAgent agent, - Func, IMessage, CancellationToken, Task> postprocessFunc) - where TAgent : IAgent - { - return agent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var reply = await agent.GenerateReplyAsync(messages, options, ct); - - return await postprocessFunc(messages, reply, ct); - }); - } - - /// - /// Register a pre process hook to an agent. The hook will be called before the agent generate the reply. This is useful when you want to modify the conversation history before the agent generate the reply. - /// - /// throw when agent name is null. - [Obsolete("Use RegisterMiddleware instead.")] - public static MiddlewareAgent RegisterPreProcess( - this TAgent agent, - Func, CancellationToken, Task>> preprocessFunc) - where TAgent : IAgent - { - return agent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var newMessages = await preprocessFunc(messages, ct); - - return await agent.GenerateReplyAsync(newMessages, options, ct); - }); - } - - /// - /// Register a middleware to an existing agent and return a new agent with the middleware. - /// To register a streaming middleware, use . - /// - public static MiddlewareAgent RegisterMiddleware( - this TAgent agent, - Func, GenerateReplyOptions?, IAgent, CancellationToken, Task> func, - string? middlewareName = null) - where TAgent : IAgent - { - var middleware = new DelegateMiddleware(middlewareName, async (context, agent, cancellationToken) => - { - return await func(context.Messages, context.Options, agent, cancellationToken); - }); - - return agent.RegisterMiddleware(middleware); - } - - /// - /// Register a middleware to an existing agent and return a new agent with the middleware. - /// To register a streaming middleware, use . - /// - public static MiddlewareAgent RegisterMiddleware( - this TAgent agent, - IMiddleware middleware) - where TAgent : IAgent - { - var middlewareAgent = new MiddlewareAgent(agent); - - return middlewareAgent.RegisterMiddleware(middleware); - } - - /// - /// Register a middleware to an existing agent and return a new agent with the middleware. - /// To register a streaming middleware, use . - /// - public static MiddlewareAgent RegisterMiddleware( - this MiddlewareAgent agent, - Func, GenerateReplyOptions?, IAgent, CancellationToken, Task> func, - string? 
middlewareName = null) - where TAgent : IAgent - { - var delegateMiddleware = new DelegateMiddleware(middlewareName, async (context, agent, cancellationToken) => - { - return await func(context.Messages, context.Options, agent, cancellationToken); - }); - - return agent.RegisterMiddleware(delegateMiddleware); - } - - /// - /// Register a middleware to an existing agent and return a new agent with the middleware. - /// To register a streaming middleware, use . - /// - public static MiddlewareAgent RegisterMiddleware( - this MiddlewareAgent agent, - IMiddleware middleware) - where TAgent : IAgent - { - var copyAgent = new MiddlewareAgent(agent); - copyAgent.Use(middleware); - - return copyAgent; - } -} diff --git a/dotnet/src/AutoGen.Core/Extension/PrintMessageMiddlewareExtension.cs b/dotnet/src/AutoGen.Core/Extension/PrintMessageMiddlewareExtension.cs deleted file mode 100644 index f782e469ee..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/PrintMessageMiddlewareExtension.cs +++ /dev/null @@ -1,75 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// PrintMessageMiddlewareExtension.cs - -using System; - -namespace AutoGen.Core; - -public static class PrintMessageMiddlewareExtension -{ - [Obsolete("This API will be removed in v0.1.0, Use RegisterPrintMessage instead.")] - public static MiddlewareAgent RegisterPrintFormatMessageHook(this TAgent agent) - where TAgent : IAgent - { - return RegisterPrintMessage(agent); - } - - [Obsolete("This API will be removed in v0.1.0, Use RegisterPrintMessage instead.")] - public static MiddlewareAgent RegisterPrintFormatMessageHook(this MiddlewareAgent agent) - where TAgent : IAgent - { - return RegisterPrintMessage(agent); - } - - [Obsolete("This API will be removed in v0.1.0, Use RegisterPrintMessage instead.")] - public static MiddlewareStreamingAgent RegisterPrintFormatMessageHook(this MiddlewareStreamingAgent agent) - where TAgent : IStreamingAgent - { - return RegisterPrintMessage(agent); - } - - /// - /// Register a to which print formatted message to console. - /// - public static MiddlewareAgent RegisterPrintMessage(this TAgent agent) - where TAgent : IAgent - { - var middleware = new PrintMessageMiddleware(); - var middlewareAgent = new MiddlewareAgent(agent); - middlewareAgent.Use(middleware); - - return middlewareAgent; - } - - /// - /// Register a to which print formatted message to console. - /// - public static MiddlewareAgent RegisterPrintMessage(this MiddlewareAgent agent) - where TAgent : IAgent - { - var middleware = new PrintMessageMiddleware(); - var middlewareAgent = new MiddlewareAgent(agent); - middlewareAgent.Use(middleware); - - return middlewareAgent; - } - - /// - /// Register a to which print formatted message to console. 
- /// - public static MiddlewareStreamingAgent RegisterPrintMessage(this MiddlewareStreamingAgent agent) - where TAgent : IStreamingAgent - { - var middleware = new PrintMessageMiddleware(); - var middlewareAgent = new MiddlewareStreamingAgent(agent); - middlewareAgent.UseStreaming(middleware); - - return middlewareAgent; - } -} diff --git a/dotnet/src/AutoGen.Core/Extension/StreamingMiddlewareExtension.cs b/dotnet/src/AutoGen.Core/Extension/StreamingMiddlewareExtension.cs deleted file mode 100644 index c75037ff9b..0000000000 --- a/dotnet/src/AutoGen.Core/Extension/StreamingMiddlewareExtension.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// StreamingMiddlewareExtension.cs - -namespace AutoGen.Core; - -public static class StreamingMiddlewareExtension -{ - /// - /// Register an to an existing and return a new agent with the registered middleware. - /// For registering an , please refer to - /// - public static MiddlewareStreamingAgent RegisterStreamingMiddleware( - this TStreamingAgent agent, - IStreamingMiddleware middleware) - where TStreamingAgent : IStreamingAgent - { - var middlewareAgent = new MiddlewareStreamingAgent(agent); - middlewareAgent.UseStreaming(middleware); - - return middlewareAgent; - } - - /// - /// Register an to an existing and return a new agent with the registered middleware. - /// For registering an , please refer to - /// - public static MiddlewareStreamingAgent RegisterStreamingMiddleware( - this MiddlewareStreamingAgent agent, - IStreamingMiddleware middleware) - where TAgent : IStreamingAgent - { - var copyAgent = new MiddlewareStreamingAgent(agent); - copyAgent.UseStreaming(middleware); - - return copyAgent; - } -} diff --git a/dotnet/src/AutoGen.Core/Function/FunctionAttribute.cs b/dotnet/src/AutoGen.Core/Function/FunctionAttribute.cs deleted file mode 100644 index 71b80154a7..0000000000 --- a/dotnet/src/AutoGen.Core/Function/FunctionAttribute.cs +++ /dev/null @@ -1,99 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionAttribute.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -[AttributeUsage(AttributeTargets.Method, Inherited = false, AllowMultiple = false)] -public class FunctionAttribute : Attribute -{ - public string? FunctionName { get; } - - public string? Description { get; } - - public FunctionAttribute(string? functionName = null, string? description = null) - { - FunctionName = functionName; - Description = description; - } -} - -public class FunctionContract -{ - /// - /// The namespace of the function. - /// - public string? Namespace { get; set; } - - /// - /// The class name of the function. - /// - public string? ClassName { get; set; } - - /// - /// The name of the function. 
- /// - public string Name { get; set; } = null!; - - /// - /// The description of the function. - /// If a structured comment is available, the description will be extracted from the summary section. - /// Otherwise, the description will be null. - /// - public string? Description { get; set; } - - /// - /// The parameters of the function. - /// - public IEnumerable? Parameters { get; set; } - - /// - /// The return type of the function. - /// - public Type? ReturnType { get; set; } - - /// - /// The description of the return section. - /// If a structured comment is available, the description will be extracted from the return section. - /// Otherwise, the description will be null. - /// - public string? ReturnDescription { get; set; } -} - -public class FunctionParameterContract -{ - /// - /// The name of the parameter. - /// - public string? Name { get; set; } - - /// - /// The description of the parameter. - /// This will be extracted from the param section of the structured comment if available. - /// Otherwise, the description will be null. - /// - public string? Description { get; set; } - - /// - /// The type of the parameter. - /// - public Type? ParameterType { get; set; } - - /// - /// If the parameter is a required parameter. - /// - public bool IsRequired { get; set; } - - /// - /// The default value of the parameter. - /// - public object? DefaultValue { get; set; } -} diff --git a/dotnet/src/AutoGen.Core/GroupChat/Graph.cs b/dotnet/src/AutoGen.Core/GroupChat/Graph.cs deleted file mode 100644 index 735d5fbd47..0000000000 --- a/dotnet/src/AutoGen.Core/GroupChat/Graph.cs +++ /dev/null @@ -1,136 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Graph.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class Graph -{ - private readonly List transitions = new List(); - - public Graph(IEnumerable? transitions = null) - { - if (transitions != null) - { - this.transitions.AddRange(transitions); - } - } - - public void AddTransition(Transition transition) - { - transitions.Add(transition); - } - - /// - /// Get the transitions of the workflow. - /// - public IEnumerable Transitions => transitions; - - /// - /// Get the next available agents that the messages can be transit to. - /// - /// the from agent - /// messages - /// A list of agents that the messages can be transit to - public async Task> TransitToNextAvailableAgentsAsync(IAgent fromAgent, IEnumerable messages, CancellationToken ct = default) - { - var nextAgents = new List(); - var availableTransitions = transitions.FindAll(t => t.From == fromAgent) ?? Enumerable.Empty(); - foreach (var transition in availableTransitions) - { - if (await transition.CanTransitionAsync(messages, ct)) - { - nextAgents.Add(transition.To); - } - } - - return nextAgents; - } -} - -/// -/// Represents a transition between two agents. -/// -public class Transition -{ - private readonly IAgent _from; - private readonly IAgent _to; - private readonly Func, CancellationToken, Task>? _canTransition; - - /// - /// Create a new instance of . 
- /// This constructor is used for testing purpose only. - /// To create a new instance of , use . - /// - /// from agent - /// to agent - /// detect if the transition is allowed, default to be always true - internal Transition(IAgent from, IAgent to, Func, CancellationToken, Task>? canTransitionAsync = null) - { - _from = from; - _to = to; - _canTransition = canTransitionAsync; - } - - /// - /// Create a new instance of without transition condition check. - /// - /// " - public static Transition Create(TFromAgent from, TToAgent to) - where TFromAgent : IAgent - where TToAgent : IAgent - { - return new Transition(from, to, (fromAgent, toAgent, messages, _) => Task.FromResult(true)); - } - - /// - /// Create a new instance of . - /// - /// " - public static Transition Create(TFromAgent from, TToAgent to, Func, Task> canTransitionAsync) - where TFromAgent : IAgent - where TToAgent : IAgent - { - return new Transition(from, to, (fromAgent, toAgent, messages, _) => canTransitionAsync.Invoke((TFromAgent)fromAgent, (TToAgent)toAgent, messages)); - } - - /// - /// Create a new instance of with cancellation token. - /// - /// " - public static Transition Create(TFromAgent from, TToAgent to, Func, CancellationToken, Task> canTransitionAsync) - where TFromAgent : IAgent - where TToAgent : IAgent - { - return new Transition(from, to, (fromAgent, toAgent, messages, ct) => canTransitionAsync.Invoke((TFromAgent)fromAgent, (TToAgent)toAgent, messages, ct)); - } - - public IAgent From => _from; - - public IAgent To => _to; - - /// - /// Check if the transition is allowed. - /// - /// messages - public Task CanTransitionAsync(IEnumerable messages, CancellationToken ct = default) - { - if (_canTransition == null) - { - return Task.FromResult(true); - } - - return _canTransition(this.From, this.To, messages, ct); - } -} diff --git a/dotnet/src/AutoGen.Core/GroupChat/GroupChat.cs b/dotnet/src/AutoGen.Core/GroupChat/GroupChat.cs deleted file mode 100644 index 1c7e5362ce..0000000000 --- a/dotnet/src/AutoGen.Core/GroupChat/GroupChat.cs +++ /dev/null @@ -1,219 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GroupChat.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class GroupChat : IGroupChat -{ - private IAgent? admin; - private List agents = new List(); - private IEnumerable initializeMessages = new List(); - private Graph? workflow = null; - private readonly IOrchestrator orchestrator; - - public IEnumerable? Messages { get; private set; } - - /// - /// Create a group chat. The next speaker will be decided by a combination effort of the admin and the workflow. - /// - /// admin agent. If provided, the admin will be invoked to decide the next speaker. - /// workflow of the group chat. If provided, the next speaker will be decided by the workflow. - /// group members. - /// - public GroupChat( - IEnumerable members, - IAgent? admin = null, - IEnumerable? initializeMessages = null, - Graph? 
workflow = null) - { - this.admin = admin; - this.agents = members.ToList(); - this.initializeMessages = initializeMessages ?? new List(); - this.workflow = workflow; - - if (admin is not null) - { - this.orchestrator = new RolePlayOrchestrator(admin, workflow); - } - else if (workflow is not null) - { - this.orchestrator = new WorkflowOrchestrator(workflow); - } - else - { - this.orchestrator = new RoundRobinOrchestrator(); - } - - this.Validation(); - } - - /// - /// Create a group chat which uses the to decide the next speaker(s). - /// - /// - /// - /// - public GroupChat( - IEnumerable members, - IOrchestrator orchestrator, - IEnumerable? initializeMessages = null) - { - this.agents = members.ToList(); - this.initializeMessages = initializeMessages ?? new List(); - this.orchestrator = orchestrator; - - this.Validation(); - } - - private void Validation() - { - // check if all agents has a name - if (this.agents.Any(x => string.IsNullOrEmpty(x.Name))) - { - throw new Exception("All agents must have a name."); - } - - // check if any agents has the same name - var names = this.agents.Select(x => x.Name).ToList(); - if (names.Distinct().Count() != names.Count) - { - throw new Exception("All agents must have a unique name."); - } - - // if there's a workflow - // check if the agents in that workflow are in the group chat - if (this.workflow != null) - { - var agentNamesInWorkflow = this.workflow.Transitions.Select(x => x.From.Name!).Concat(this.workflow.Transitions.Select(x => x.To.Name!)).Distinct(); - if (agentNamesInWorkflow.Any(x => !this.agents.Select(a => a.Name).Contains(x))) - { - throw new Exception("All agents in the workflow must be in the group chat."); - } - } - } - - /// - /// Select the next speaker based on the conversation history. - /// The next speaker will be decided by a combination effort of the admin and the workflow. - /// Firstly, a group of candidates will be selected by the workflow. If there's only one candidate, then that candidate will be the next speaker. - /// Otherwise, the admin will be invoked to decide the next speaker using role-play prompt. - /// - /// current speaker - /// conversation history - /// next speaker. - [Obsolete("Please use RolePlayOrchestrator or WorkflowOrchestrator")] - public async Task SelectNextSpeakerAsync(IAgent currentSpeaker, IEnumerable conversationHistory) - { - var agentNames = this.agents.Select(x => x.Name).ToList(); - if (this.workflow != null) - { - var nextAvailableAgents = await this.workflow.TransitToNextAvailableAgentsAsync(currentSpeaker, conversationHistory); - agentNames = nextAvailableAgents.Select(x => x.Name).ToList(); - if (agentNames.Count() == 0) - { - throw new Exception("No next available agents found in the current workflow"); - } - - if (agentNames.Count() == 1) - { - return this.agents.First(x => x.Name == agentNames.First()); - } - } - - if (this.admin == null) - { - throw new Exception("No admin is provided."); - } - - var systemMessage = new TextMessage(Role.System, - content: $@"You are in a role play game. Carefully read the conversation history and carry on the conversation. 
-The available roles are: -{string.Join(",", agentNames)} - -Each message will start with 'From name:', e.g: -From {agentNames.First()}: -//your message//."); - - var conv = this.ProcessConversationsForRolePlay(this.initializeMessages, conversationHistory); - - var messages = new IMessage[] { systemMessage }.Concat(conv); - var response = await this.admin.GenerateReplyAsync( - messages: messages, - options: new GenerateReplyOptions - { - Temperature = 0, - MaxToken = 128, - StopSequence = [":"], - Functions = [], - }); - - var name = response?.GetContent() ?? throw new Exception("No name is returned."); - - // remove From - name = name!.Substring(5); - return this.agents.First(x => x.Name!.ToLower() == name.ToLower()); - } - - /// - public void AddInitializeMessage(IMessage message) - { - this.SendIntroduction(message); - } - - public async Task> CallAsync( - IEnumerable? chatHistory = null, - int maxRound = 10, - CancellationToken ct = default) - { - var conversationHistory = new List(); - conversationHistory.AddRange(this.initializeMessages); - if (chatHistory != null) - { - conversationHistory.AddRange(chatHistory); - } - var roundLeft = maxRound; - - while (roundLeft > 0) - { - var orchestratorContext = new OrchestrationContext - { - Candidates = this.agents, - ChatHistory = conversationHistory, - }; - var nextSpeaker = await this.orchestrator.GetNextSpeakerAsync(orchestratorContext, ct); - if (nextSpeaker == null) - { - break; - } - - var result = await nextSpeaker.GenerateReplyAsync(conversationHistory, cancellationToken: ct); - conversationHistory.Add(result); - - if (result.IsGroupChatTerminateMessage()) - { - return conversationHistory; - } - - roundLeft--; - } - - return conversationHistory; - } - - public void SendIntroduction(IMessage message) - { - this.initializeMessages = this.initializeMessages.Append(message); - } -} diff --git a/dotnet/src/AutoGen.Core/GroupChat/IGroupChat.cs b/dotnet/src/AutoGen.Core/GroupChat/IGroupChat.cs deleted file mode 100644 index 8a2c7148be..0000000000 --- a/dotnet/src/AutoGen.Core/GroupChat/IGroupChat.cs +++ /dev/null @@ -1,28 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IGroupChat.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public interface IGroupChat -{ - /// - /// Send an introduction message to the group chat. - /// - void SendIntroduction(IMessage message); - - [Obsolete("please use SendIntroduction")] - void AddInitializeMessage(IMessage message); - - Task> CallAsync(IEnumerable? 
conversation = null, int maxRound = 10, CancellationToken ct = default); -} diff --git a/dotnet/src/AutoGen.Core/GroupChat/RoundRobinGroupChat.cs b/dotnet/src/AutoGen.Core/GroupChat/RoundRobinGroupChat.cs deleted file mode 100644 index 53ff220091..0000000000 --- a/dotnet/src/AutoGen.Core/GroupChat/RoundRobinGroupChat.cs +++ /dev/null @@ -1,39 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// RoundRobinGroupChat.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -/// -/// Obsolete: please use -/// -[Obsolete("please use RoundRobinGroupChat")] -public class SequentialGroupChat : RoundRobinGroupChat -{ - [Obsolete("please use RoundRobinGroupChat")] - public SequentialGroupChat(IEnumerable agents, List? initializeMessages = null) - : base(agents, initializeMessages) - { - } -} - -/// -/// A group chat that allows agents to talk in a round-robin manner. -/// -public class RoundRobinGroupChat : GroupChat -{ - public RoundRobinGroupChat( - IEnumerable agents, - List? initializeMessages = null) - : base(agents, initializeMessages: initializeMessages) - { - } -} diff --git a/dotnet/src/AutoGen.Core/ILLMConfig.cs b/dotnet/src/AutoGen.Core/ILLMConfig.cs deleted file mode 100644 index 6d84514998..0000000000 --- a/dotnet/src/AutoGen.Core/ILLMConfig.cs +++ /dev/null @@ -1,14 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ILLMConfig.cs - -namespace AutoGen.Core; - -public interface ILLMConfig -{ -} diff --git a/dotnet/src/AutoGen.Core/Message/AggregateMessage.cs b/dotnet/src/AutoGen.Core/Message/AggregateMessage.cs deleted file mode 100644 index 52a2234848..0000000000 --- a/dotnet/src/AutoGen.Core/Message/AggregateMessage.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AggregateMessage.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -public class AggregateMessage : IMessage - where TMessage1 : IMessage - where TMessage2 : IMessage -{ - public AggregateMessage(TMessage1 message1, TMessage2 message2, string? from = null) - { - this.From = from; - this.Message1 = message1; - this.Message2 = message2; - this.Validate(); - } - - public TMessage1 Message1 { get; } - - public TMessage2 Message2 { get; } - - public string? 
From { get; set; } - - private void Validate() - { - var messages = new List { this.Message1, this.Message2 }; - // the from property of all messages should be the same with the from property of the aggregate message - - foreach (var message in messages) - { - if (message.From != this.From) - { - throw new ArgumentException($"The from property of the message {message} is different from the from property of the aggregate message {this}"); - } - } - } - - public override string ToString() - { - var stringBuilder = new System.Text.StringBuilder(); - var messages = new List { this.Message1, this.Message2 }; - stringBuilder.Append($"AggregateMessage({this.From})"); - foreach (var message in messages) - { - stringBuilder.Append($"\n\t{message}"); - } - - return stringBuilder.ToString(); - } -} diff --git a/dotnet/src/AutoGen.Core/Message/IMessage.cs b/dotnet/src/AutoGen.Core/Message/IMessage.cs deleted file mode 100644 index bf727beb4e..0000000000 --- a/dotnet/src/AutoGen.Core/Message/IMessage.cs +++ /dev/null @@ -1,82 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IMessage.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -/// -/// The universal message interface for all message types in AutoGen. -/// Related PR: https://github.com/microsoft/autogen/pull/1676 -/// Built-in message types -/// -/// -/// : plain text message. -/// -/// -/// : image message. -/// -/// -/// : message type for multimodal message. The current support message items are and . -/// -/// -/// : message type for tool call. This message supports both single and parallel tool call. -/// -/// -/// : message type for tool call result. -/// -/// -/// : This type is used by previous version of AutoGen. And it's reserved for backward compatibility. -/// -/// -/// : an aggregate message type that contains two message types. -/// This type is useful when you want to combine two message types into one unique message type. One example is when invoking a tool call and you want to return both and . -/// One example of how this type is used in AutoGen is and its return message -/// -/// -/// -public interface IMessage -{ - string? From { get; set; } -} - -public interface IMessage : IMessage -{ - T Content { get; } -} - -/// -/// The interface for messages that can get text content. -/// This interface will be used by to get the content from the message. -/// -public interface ICanGetTextContent : IMessage -{ - public string? GetContent(); -} - -/// -/// The interface for messages that can get a list of -/// -public interface ICanGetToolCalls : IMessage -{ - public IEnumerable GetToolCalls(); -} - -[Obsolete("Use IMessage instead")] -public interface IStreamingMessage -{ - string? 
From { get; set; } -} - -[Obsolete("Use IMessage instead")] -public interface IStreamingMessage : IStreamingMessage -{ - T Content { get; } -} diff --git a/dotnet/src/AutoGen.Core/Message/ImageMessage.cs b/dotnet/src/AutoGen.Core/Message/ImageMessage.cs deleted file mode 100644 index 6011d518db..0000000000 --- a/dotnet/src/AutoGen.Core/Message/ImageMessage.cs +++ /dev/null @@ -1,88 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ImageMessage.cs - -using System; - -namespace AutoGen.Core; - -public class ImageMessage : IMessage -{ - public ImageMessage(Role role, string url, string? from = null, string? mimeType = null) - : this(role, new Uri(url), from, mimeType) - { - } - - public ImageMessage(Role role, Uri uri, string? from = null, string? mimeType = null) - { - this.Role = role; - this.From = from; - this.Url = uri.ToString(); - - // try infer mimeType from uri extension if not provided - if (mimeType is null) - { - mimeType = uri switch - { - _ when uri.AbsoluteUri.EndsWith(".png", StringComparison.OrdinalIgnoreCase) => "image/png", - _ when uri.AbsoluteUri.EndsWith(".jpg", StringComparison.OrdinalIgnoreCase) => "image/jpeg", - _ when uri.AbsoluteUri.EndsWith(".jpeg", StringComparison.OrdinalIgnoreCase) => "image/jpeg", - _ when uri.AbsoluteUri.EndsWith(".gif", StringComparison.OrdinalIgnoreCase) => "image/gif", - _ when uri.AbsoluteUri.EndsWith(".bmp", StringComparison.OrdinalIgnoreCase) => "image/bmp", - _ when uri.AbsoluteUri.EndsWith(".webp", StringComparison.OrdinalIgnoreCase) => "image/webp", - _ when uri.AbsoluteUri.EndsWith(".svg", StringComparison.OrdinalIgnoreCase) => "image/svg+xml", - _ => throw new ArgumentException("MimeType is required for ImageMessage", nameof(mimeType)) - }; - } - - this.MimeType = mimeType; - } - - public ImageMessage(Role role, BinaryData data, string? from = null) - { - if (data.IsEmpty) - { - throw new ArgumentException("Data cannot be empty", nameof(data)); - } - - if (data.MediaType is null) - { - throw new ArgumentException("MediaType is needed for DataUri Images", nameof(data)); - } - - this.Role = role; - this.From = from; - this.Data = data; - this.MimeType = data.MediaType; - } - - public Role Role { get; } - - public string? Url { get; } - - public string? From { get; set; } - - public BinaryData? Data { get; } - - public string MimeType { get; } - - public string BuildDataUri() - { - if (this.Data is null) - { - throw new NullReferenceException($"{nameof(Data)}"); - } - - return $"data:{this.MimeType};base64,{Convert.ToBase64String(this.Data.ToArray())}"; - } - - public override string ToString() - { - return $"ImageMessage({this.Role}, {(this.Data != null ? BuildDataUri() : this.Url) ?? 
string.Empty}, {this.From})"; - } -} diff --git a/dotnet/src/AutoGen.Core/Message/Message.cs b/dotnet/src/AutoGen.Core/Message/Message.cs deleted file mode 100644 index 220ee674be..0000000000 --- a/dotnet/src/AutoGen.Core/Message/Message.cs +++ /dev/null @@ -1,61 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Message.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -[Obsolete("This message class is deprecated, please use a specific AutoGen built-in message type instead. For more information, please visit https://ag2ai.github.io/autogen-for-net/articles/Built-in-messages.html")] -public class Message : IMessage -{ - public Message( - Role role, - string? content, - string? from = null, - ToolCall? toolCall = null) - { - this.Role = role; - this.Content = content; - this.From = from; - this.FunctionName = toolCall?.FunctionName; - this.FunctionArguments = toolCall?.FunctionArguments; - } - - public Message(Message other) - : this(other.Role, other.Content, other.From) - { - this.FunctionName = other.FunctionName; - this.FunctionArguments = other.FunctionArguments; - this.Value = other.Value; - this.Metadata = other.Metadata; - } - - public Role Role { get; set; } - - public string? Content { get; set; } - - public string? From { get; set; } - - public string? FunctionName { get; set; } - - public string? FunctionArguments { get; set; } - - /// - /// raw message - /// - public object? Value { get; set; } - - public IList> Metadata { get; set; } = new List>(); - - public override string ToString() - { - return $"Message({this.Role}, {this.Content}, {this.From}, {this.FunctionName}, {this.FunctionArguments})"; - } -} diff --git a/dotnet/src/AutoGen.Core/Message/MessageEnvelope.cs b/dotnet/src/AutoGen.Core/Message/MessageEnvelope.cs deleted file mode 100644 index 4a37968239..0000000000 --- a/dotnet/src/AutoGen.Core/Message/MessageEnvelope.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MessageEnvelope.cs - -using System.Collections.Generic; - -namespace AutoGen.Core; - -public abstract class MessageEnvelope : IMessage -{ - public MessageEnvelope(string? from = null, IDictionary? metadata = null) - { - this.From = from; - this.Metadata = metadata ?? new Dictionary(); - } - - public static MessageEnvelope Create(TContent content, string? from = null, IDictionary? metadata = null) - { - return new MessageEnvelope(content, from, metadata); - } - - public string? From { get; set; } - - public IDictionary Metadata { get; set; } -} - -public class MessageEnvelope : MessageEnvelope, IMessage -{ - public MessageEnvelope(T content, string? from = null, IDictionary? 
metadata = null) - : base(from, metadata) - { - this.Content = content; - this.From = from; - this.Metadata = metadata ?? new Dictionary(); - } - - public T Content { get; } -} diff --git a/dotnet/src/AutoGen.Core/Message/MultiModalMessage.cs b/dotnet/src/AutoGen.Core/Message/MultiModalMessage.cs deleted file mode 100644 index 392538624b..0000000000 --- a/dotnet/src/AutoGen.Core/Message/MultiModalMessage.cs +++ /dev/null @@ -1,64 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MultiModalMessage.cs - -using System; -using System.Collections.Generic; - -namespace AutoGen.Core; - -public class MultiModalMessage : IMessage -{ - public MultiModalMessage(Role role, IEnumerable content, string? from = null) - { - this.Role = role; - this.Content = content; - this.From = from; - this.Validate(); - } - - public Role Role { get; set; } - - public IEnumerable Content { get; set; } - - public string? From { get; set; } - - private void Validate() - { - foreach (var message in this.Content) - { - if (message.From != this.From) - { - var reason = $"The from property of the message {message} is different from the from property of the aggregate message {this}"; - throw new ArgumentException($"Invalid aggregate message {reason}"); - } - } - - // all message must be either text or image - foreach (var message in this.Content) - { - if (message is not TextMessage && message is not ImageMessage) - { - var reason = $"The message {message} is not a text or image message"; - throw new ArgumentException($"Invalid aggregate message {reason}"); - } - } - } - - public override string ToString() - { - var stringBuilder = new System.Text.StringBuilder(); - stringBuilder.Append($"MultiModalMessage({this.Role}, {this.From})"); - foreach (var message in this.Content) - { - stringBuilder.Append($"\n\t{message}"); - } - - return stringBuilder.ToString(); - } -} diff --git a/dotnet/src/AutoGen.Core/Message/Role.cs b/dotnet/src/AutoGen.Core/Message/Role.cs deleted file mode 100644 index 5509b9b760..0000000000 --- a/dotnet/src/AutoGen.Core/Message/Role.cs +++ /dev/null @@ -1,60 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Role.cs - -using System; - -namespace AutoGen.Core; - -public readonly struct Role : IEquatable -{ - private readonly string label; - - internal Role(string name) - { - label = name; - } - - public static Role User { get; } = new Role("user"); - - public static Role Assistant { get; } = new Role("assistant"); - - public static Role System { get; } = new Role("system"); - - public static Role Function { get; } = new Role("function"); - - public bool Equals(Role other) - { - return label.Equals(other.label, StringComparison.OrdinalIgnoreCase); - } - - public override string ToString() - { - return label; - } - - public override bool Equals(object? obj) - { - return obj is Role other && Equals(other); - } - - public override int GetHashCode() - { - return label.GetHashCode(); - } - - public static bool operator ==(Role left, Role right) - { - return left.Equals(right); - } - - public static bool operator !=(Role left, Role right) - { - return !(left == right); - } -} diff --git a/dotnet/src/AutoGen.Core/Message/TextMessage.cs b/dotnet/src/AutoGen.Core/Message/TextMessage.cs deleted file mode 100644 index 44b7d33ea9..0000000000 --- a/dotnet/src/AutoGen.Core/Message/TextMessage.cs +++ /dev/null @@ -1,79 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TextMessage.cs - -namespace AutoGen.Core; - -public class TextMessage : IMessage, ICanGetTextContent -{ - public TextMessage(Role role, string content, string? from = null) - { - this.Content = content; - this.Role = role; - this.From = from; - } - - public TextMessage(TextMessageUpdate update) - { - this.Content = update.Content ?? string.Empty; - this.Role = update.Role; - this.From = update.From; - } - - public void Update(TextMessageUpdate update) - { - if (update.Role != this.Role) - { - throw new System.ArgumentException("Role mismatch", nameof(update)); - } - - if (update.From != this.From) - { - throw new System.ArgumentException("From mismatch", nameof(update)); - } - - this.Content = this.Content + update.Content ?? string.Empty; - } - - public Role Role { get; set; } - - public string Content { get; set; } - - public string? From { get; set; } - - public override string ToString() - { - return $"TextMessage({this.Role}, {this.Content}, {this.From})"; - } - - public string? GetContent() - { - return this.Content; - } -} - -public class TextMessageUpdate : IMessage, ICanGetTextContent -{ - public TextMessageUpdate(Role role, string? content, string? from = null) - { - this.Content = content; - this.From = from; - this.Role = role; - } - - public string? Content { get; set; } - - public string? From { get; set; } - - public Role Role { get; set; } - - public string? 
GetContent() - { - return this.Content; - } -} diff --git a/dotnet/src/AutoGen.Core/Message/ToolCallAggregateMessage.cs b/dotnet/src/AutoGen.Core/Message/ToolCallAggregateMessage.cs deleted file mode 100644 index d45d0ccda1..0000000000 --- a/dotnet/src/AutoGen.Core/Message/ToolCallAggregateMessage.cs +++ /dev/null @@ -1,34 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ToolCallAggregateMessage.cs - -using System.Collections.Generic; - -namespace AutoGen.Core; - -/// -/// An aggregate message that contains a tool call message and a tool call result message. -/// This message type is used by to return both and . -/// -public class ToolCallAggregateMessage : AggregateMessage, ICanGetTextContent, ICanGetToolCalls -{ - public ToolCallAggregateMessage(ToolCallMessage message1, ToolCallResultMessage message2, string? from = null) - : base(message1, message2, from) - { - } - - public string? GetContent() - { - return this.Message2.GetContent(); - } - - public IEnumerable GetToolCalls() - { - return this.Message1.GetToolCalls(); - } -} diff --git a/dotnet/src/AutoGen.Core/Message/ToolCallMessage.cs b/dotnet/src/AutoGen.Core/Message/ToolCallMessage.cs deleted file mode 100644 index 6ca865e521..0000000000 --- a/dotnet/src/AutoGen.Core/Message/ToolCallMessage.cs +++ /dev/null @@ -1,132 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ToolCallMessage.cs - -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace AutoGen.Core; - -public class ToolCall -{ - public ToolCall(string functionName, string functionArgs) - { - this.FunctionName = functionName; - this.FunctionArguments = functionArgs; - } - - public ToolCall(string functionName, string functionArgs, string result) - { - this.FunctionName = functionName; - this.FunctionArguments = functionArgs; - this.Result = result; - } - - public string FunctionName { get; set; } - - public string FunctionArguments { get; set; } - - public string? ToolCallId { get; set; } - - public string? Result { get; set; } - - public override string ToString() - { - return $"ToolCall({this.FunctionName}, {this.FunctionArguments}, {this.Result})"; - } -} - -public class ToolCallMessage : IMessage, ICanGetToolCalls, ICanGetTextContent -{ - public ToolCallMessage(IEnumerable toolCalls, string? from = null) - { - this.From = from; - this.ToolCalls = toolCalls.ToList(); - } - - public ToolCallMessage(string functionName, string functionArgs, string? 
from = null) - { - this.From = from; - this.ToolCalls = new List { new ToolCall(functionName, functionArgs) { ToolCallId = functionName } }; - } - - public ToolCallMessage(ToolCallMessageUpdate update) - { - this.From = update.From; - this.ToolCalls = new List { new ToolCall(update.FunctionName, update.FunctionArgumentUpdate) }; - } - - public void Update(ToolCallMessageUpdate update) - { - // firstly, valid if the update is from the same agent - if (update.From != this.From) - { - throw new System.ArgumentException("From mismatch", nameof(update)); - } - - // if update.FunctionName exists in the tool calls, update the function arguments - var toolCall = this.ToolCalls.FirstOrDefault(tc => tc.FunctionName == update.FunctionName); - if (toolCall is not null) - { - toolCall.FunctionArguments += update.FunctionArgumentUpdate; - } - else - { - this.ToolCalls.Add(new ToolCall(update.FunctionName, update.FunctionArgumentUpdate)); - } - } - - public IList ToolCalls { get; set; } - - public string? From { get; set; } - - /// - /// Some LLMs might also include text content in a tool call response, like GPT. - /// This field is used to store the text content in that case. - /// - public string? Content { get; set; } - - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append($"ToolCallMessage({this.From})"); - foreach (var toolCall in this.ToolCalls) - { - sb.Append($"\n\t{toolCall}"); - } - - return sb.ToString(); - } - - public IEnumerable GetToolCalls() - { - return this.ToolCalls; - } - - public string? GetContent() - { - return this.Content; - } -} - -public class ToolCallMessageUpdate : IMessage -{ - public ToolCallMessageUpdate(string functionName, string functionArgumentUpdate, string? from = null) - { - this.From = from; - this.FunctionName = functionName; - this.FunctionArgumentUpdate = functionArgumentUpdate; - } - - public string? From { get; set; } - - public string FunctionName { get; set; } - - public string FunctionArgumentUpdate { get; set; } -} diff --git a/dotnet/src/AutoGen.Core/Message/ToolCallResultMessage.cs b/dotnet/src/AutoGen.Core/Message/ToolCallResultMessage.cs deleted file mode 100644 index 1726e882b3..0000000000 --- a/dotnet/src/AutoGen.Core/Message/ToolCallResultMessage.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ToolCallResultMessage.cs - -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace AutoGen.Core; - -public class ToolCallResultMessage : IMessage, ICanGetTextContent -{ - public ToolCallResultMessage(IEnumerable toolCalls, string? from = null) - { - this.From = from; - this.ToolCalls = toolCalls.ToList(); - } - - public ToolCallResultMessage(string result, string functionName, string functionArgs, string? from = null) - { - this.From = from; - var toolCall = new ToolCall(functionName, functionArgs) { ToolCallId = functionName }; - toolCall.Result = result; - this.ToolCalls = [toolCall]; - } - - /// - /// The original tool call message - /// - public IList ToolCalls { get; set; } - - public string? From { get; set; } - - public string? 
GetContent() - { - var results = this.ToolCalls - .Where(x => x.Result != null) - .Select(x => x.Result); - - return string.Join("\n", results); - } - - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append($"ToolCallResultMessage({this.From})"); - foreach (var toolCall in this.ToolCalls) - { - sb.Append($"\n\t{toolCall}"); - } - - return sb.ToString(); - } -} diff --git a/dotnet/src/AutoGen.Core/Middleware/DelegateMiddleware.cs b/dotnet/src/AutoGen.Core/Middleware/DelegateMiddleware.cs deleted file mode 100644 index 9a8220e101..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/DelegateMiddleware.cs +++ /dev/null @@ -1,51 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DelegateMiddleware.cs - -using System; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -internal class DelegateMiddleware : IMiddleware -{ - /// - /// middleware delegate. Call into the next function to continue the execution of the next middleware. Otherwise, short cut the middleware execution. - /// - /// cancellation token - public delegate Task MiddlewareDelegate( - MiddlewareContext context, - IAgent agent, - CancellationToken cancellationToken); - - private readonly MiddlewareDelegate middlewareDelegate; - - public DelegateMiddleware(string? name, Func> middlewareDelegate) - { - this.Name = name; - this.middlewareDelegate = async (context, agent, cancellationToken) => - { - return await middlewareDelegate(context, agent, cancellationToken); - }; - } - - public string? Name { get; } - - public Task InvokeAsync( - MiddlewareContext context, - IAgent agent, - CancellationToken cancellationToken = default) - { - var messages = context.Messages; - var options = context.Options; - - return this.middlewareDelegate(context, agent, cancellationToken); - } -} - diff --git a/dotnet/src/AutoGen.Core/Middleware/FunctionCallMiddleware.cs b/dotnet/src/AutoGen.Core/Middleware/FunctionCallMiddleware.cs deleted file mode 100644 index b6b4a58228..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/FunctionCallMiddleware.cs +++ /dev/null @@ -1,182 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionCallMiddleware.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -/// -/// The middleware that process function call message that both send to an agent or reply from an agent. -/// If the last message is and the tool calls is available in this middleware's function map, -/// the tools from the last message will be invoked and a will be returned. In this situation, -/// the inner agent will be short-cut and won't be invoked. 
-/// Otherwise, the message will be sent to the inner agent. In this situation -/// if the reply from the inner agent is , -/// and the tool calls is available in this middleware's function map, the tools from the reply will be invoked, -/// and a will be returned. -/// -/// If the reply from the inner agent is but the tool calls is not available in this middleware's function map, -/// or the reply from the inner agent is not , the original reply from the inner agent will be returned. -/// -/// When used as a streaming middleware, if the streaming reply from the inner agent is or , -/// This middleware will update the message accordingly and invoke the function if the tool call is available in this middleware's function map. -/// If the streaming reply from the inner agent is other types of message, the most recent message will be used to invoke the function. -/// -/// -public class FunctionCallMiddleware : IStreamingMiddleware -{ - private readonly IEnumerable? functions; - private readonly IDictionary>>? functionMap; - - public FunctionCallMiddleware( - IEnumerable? functions = null, - IDictionary>>? functionMap = null, - string? name = null) - { - this.Name = name ?? nameof(FunctionCallMiddleware); - this.functions = functions; - this.functionMap = functionMap; - } - - public string? Name { get; } - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var lastMessage = context.Messages.Last(); - if (lastMessage is ToolCallMessage toolCallMessage) - { - return await this.InvokeToolCallMessagesBeforeInvokingAgentAsync(toolCallMessage, agent); - } - - // combine functions - var options = new GenerateReplyOptions(context.Options ?? new GenerateReplyOptions()); - var combinedFunctions = this.functions?.Concat(options.Functions ?? []) ?? options.Functions; - options.Functions = combinedFunctions?.ToArray(); - - var reply = await agent.GenerateReplyAsync(context.Messages, options, cancellationToken); - - // if the reply is a function call message plus the function's name is available in function map, invoke the function and return the result instead of sending to the agent. - if (reply is ToolCallMessage toolCallMsg) - { - return await this.InvokeToolCallMessagesAfterInvokingAgentAsync(toolCallMsg, agent); - } - - // for all other messages, just return the reply from the agent. - return reply; - } - - public async IAsyncEnumerable InvokeAsync( - MiddlewareContext context, - IStreamingAgent agent, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var lastMessage = context.Messages.Last(); - if (lastMessage is ToolCallMessage toolCallMessage) - { - yield return await this.InvokeToolCallMessagesBeforeInvokingAgentAsync(toolCallMessage, agent); - } - - // combine functions - var options = new GenerateReplyOptions(context.Options ?? new GenerateReplyOptions()); - var combinedFunctions = this.functions?.Concat(options.Functions ?? []) ?? options.Functions; - options.Functions = combinedFunctions?.ToArray(); - - IMessage? 
mergedFunctionCallMessage = default; - await foreach (var message in agent.GenerateStreamingReplyAsync(context.Messages, options, cancellationToken)) - { - if (message is ToolCallMessageUpdate toolCallMessageUpdate && this.functionMap != null) - { - if (mergedFunctionCallMessage is null) - { - mergedFunctionCallMessage = new ToolCallMessage(toolCallMessageUpdate); - } - else if (mergedFunctionCallMessage is ToolCallMessage toolCall) - { - toolCall.Update(toolCallMessageUpdate); - } - else - { - throw new InvalidOperationException("The first message is ToolCallMessage, but the update message is not ToolCallMessageUpdate"); - } - } - else if (message is ToolCallMessage toolCallMessage1) - { - mergedFunctionCallMessage = toolCallMessage1; - } - else - { - yield return message; - } - } - - if (mergedFunctionCallMessage is ToolCallMessage toolCallMsg) - { - yield return await this.InvokeToolCallMessagesAfterInvokingAgentAsync(toolCallMsg, agent); - } - } - - private async Task InvokeToolCallMessagesBeforeInvokingAgentAsync(ToolCallMessage toolCallMessage, IAgent agent) - { - var toolCallResult = new List(); - var toolCalls = toolCallMessage.ToolCalls; - foreach (var toolCall in toolCalls) - { - var functionName = toolCall.FunctionName; - var functionArguments = toolCall.FunctionArguments; - if (this.functionMap?.TryGetValue(functionName, out var func) is true) - { - var result = await func(functionArguments); - toolCallResult.Add(new ToolCall(functionName, functionArguments, result) { ToolCallId = toolCall.ToolCallId }); - } - else if (this.functionMap is not null) - { - var errorMessage = $"Function {functionName} is not available. Available functions are: {string.Join(", ", this.functionMap.Select(f => f.Key))}"; - - toolCallResult.Add(new ToolCall(functionName, functionArguments, errorMessage) { ToolCallId = toolCall.ToolCallId }); - } - else - { - throw new InvalidOperationException("FunctionMap is not available"); - } - } - - return new ToolCallResultMessage(toolCallResult, from: agent.Name); - } - - private async Task InvokeToolCallMessagesAfterInvokingAgentAsync(ToolCallMessage toolCallMsg, IAgent agent) - { - var toolCallsReply = toolCallMsg.ToolCalls; - var toolCallResult = new List(); - foreach (var toolCall in toolCallsReply) - { - var fName = toolCall.FunctionName; - var fArgs = toolCall.FunctionArguments; - if (this.functionMap?.TryGetValue(fName, out var func) is true) - { - var result = await func(fArgs); - toolCallResult.Add(new ToolCall(fName, fArgs, result) { ToolCallId = toolCall.ToolCallId }); - } - } - - if (toolCallResult.Count() > 0) - { - var toolCallResultMessage = new ToolCallResultMessage(toolCallResult, from: agent.Name); - return new ToolCallAggregateMessage(toolCallMsg, toolCallResultMessage, from: agent.Name); - } - else - { - return toolCallMsg; - } - } -} diff --git a/dotnet/src/AutoGen.Core/Middleware/IMiddleware.cs b/dotnet/src/AutoGen.Core/Middleware/IMiddleware.cs deleted file mode 100644 index de14163ec3..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/IMiddleware.cs +++ /dev/null @@ -1,32 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
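// Illustrative usage sketch for FunctionCallMiddleware above. The generic arguments are
// stripped in this diff; IDictionary<string, Func<string, Task<string>>> is assumed for the
// function map, RegisterStreamingMiddleware is assumed from the middleware extensions, and
// "GetWeather" plus the innerAgent variable are hypothetical.
var functionMap = new Dictionary<string, Func<string, Task<string>>>
{
    ["GetWeather"] = args => Task.FromResult($"It is sunny today. (args: {args})"),
};

var toolMiddleware = new FunctionCallMiddleware(functionMap: functionMap);

// When the inner agent replies with a ToolCallMessage whose function name is in the map,
// the middleware invokes the tool and returns a ToolCallAggregateMessage instead.
var agentWithTools = innerAgent.RegisterStreamingMiddleware(toolMiddleware);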
-// IMiddleware.cs - -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -/// -/// The middleware interface. For streaming-version middleware, check . -/// -public interface IMiddleware -{ - /// - /// the name of the middleware - /// - public string? Name { get; } - - /// - /// The method to invoke the middleware - /// - public Task InvokeAsync( - MiddlewareContext context, - IAgent agent, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/AutoGen.Core/Middleware/IStreamingMiddleware.cs b/dotnet/src/AutoGen.Core/Middleware/IStreamingMiddleware.cs deleted file mode 100644 index d53202513a..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/IStreamingMiddleware.cs +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IStreamingMiddleware.cs - -using System.Collections.Generic; -using System.Threading; - -namespace AutoGen.Core; - -/// -/// The streaming middleware interface. For non-streaming version middleware, check . -/// -public interface IStreamingMiddleware : IMiddleware -{ - /// - /// The streaming version of . - /// - public IAsyncEnumerable InvokeAsync( - MiddlewareContext context, - IStreamingAgent agent, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/AutoGen.Core/Middleware/MiddlewareContext.cs b/dotnet/src/AutoGen.Core/Middleware/MiddlewareContext.cs deleted file mode 100644 index 11dceddaf9..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/MiddlewareContext.cs +++ /dev/null @@ -1,33 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MiddlewareContext.cs - -using System.Collections.Generic; - -namespace AutoGen.Core; - -public class MiddlewareContext -{ - public MiddlewareContext( - IEnumerable messages, - GenerateReplyOptions? options) - { - this.Messages = messages; - this.Options = options; - } - - /// - /// Messages to send to the agent - /// - public IEnumerable Messages { get; } - - /// - /// Options to generate the reply - /// - public GenerateReplyOptions? Options { get; } -} diff --git a/dotnet/src/AutoGen.Core/Middleware/PrintMessageMiddleware.cs b/dotnet/src/AutoGen.Core/Middleware/PrintMessageMiddleware.cs deleted file mode 100644 index 9d90243cff..0000000000 --- a/dotnet/src/AutoGen.Core/Middleware/PrintMessageMiddleware.cs +++ /dev/null @@ -1,124 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
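// Illustrative sketch of a custom IMiddleware built on the interface and MiddlewareContext
// shown above. The Task<IMessage> return type is assumed (generics are stripped in this diff),
// and the appended system hint is a hypothetical example.
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using AutoGen.Core;

public class ConcisenessMiddleware : IMiddleware
{
    public string? Name => nameof(ConcisenessMiddleware);

    public async Task<IMessage> InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default)
    {
        // Append a system hint before delegating to the inner agent.
        var hint = new TextMessage(Role.System, "Answer in at most two sentences.");
        var messages = context.Messages.Concat(new IMessage[] { hint });

        return await agent.GenerateReplyAsync(messages, context.Options, cancellationToken);
    }
}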
-// PrintMessageMiddleware.cs - -using System; -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -/// -/// The middleware that prints the reply from agent to the console. -/// -public class PrintMessageMiddleware : IStreamingMiddleware -{ - public string? Name => nameof(PrintMessageMiddleware); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - if (agent is IStreamingAgent streamingAgent) - { - IMessage? recentUpdate = null; - await foreach (var message in this.InvokeAsync(context, streamingAgent, cancellationToken)) - { - if (message is IMessage imessage) - { - recentUpdate = imessage; - } - } - Console.WriteLine(); - if (recentUpdate is not null && recentUpdate is not TextMessage) - { - Console.WriteLine(recentUpdate.FormatMessage()); - } - - return recentUpdate ?? throw new InvalidOperationException("The message is not a valid message"); - } - else - { - var reply = await agent.GenerateReplyAsync(context.Messages, context.Options, cancellationToken); - - var formattedMessages = reply.FormatMessage(); - - Console.WriteLine(formattedMessages); - - return reply; - } - } - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - IMessage? recentUpdate = null; - await foreach (var message in agent.GenerateStreamingReplyAsync(context.Messages, context.Options, cancellationToken)) - { - if (message is TextMessageUpdate textMessageUpdate) - { - if (recentUpdate is null) - { - // Print from: xxx - Console.WriteLine($"from: {textMessageUpdate.From}"); - recentUpdate = new TextMessage(textMessageUpdate); - Console.Write(textMessageUpdate.Content); - - yield return message; - } - else if (recentUpdate is TextMessage recentTextMessage) - { - // Print the content of the message - Console.Write(textMessageUpdate.Content); - recentTextMessage.Update(textMessageUpdate); - - yield return recentTextMessage; - } - else - { - throw new InvalidOperationException("The recent update is not a TextMessage"); - } - } - else if (message is ToolCallMessageUpdate toolCallUpdate) - { - if (recentUpdate is null) - { - recentUpdate = new ToolCallMessage(toolCallUpdate); - - yield return message; - } - else if (recentUpdate is ToolCallMessage recentToolCallMessage) - { - recentToolCallMessage.Update(toolCallUpdate); - - yield return message; - } - else - { - throw new InvalidOperationException("The recent update is not a ToolCallMessage"); - } - } - else if (message is IMessage imessage) - { - recentUpdate = imessage; - - yield return imessage; - } - else - { - throw new InvalidOperationException("The message is not a valid message"); - } - } - Console.WriteLine(); - if (recentUpdate is not null && recentUpdate is not TextMessage) - { - Console.WriteLine(recentUpdate.FormatMessage()); - } - - yield return recentUpdate ?? 
throw new InvalidOperationException("The message is not a valid message"); - } -} diff --git a/dotnet/src/AutoGen.Core/Orchestrator/IOrchestrator.cs b/dotnet/src/AutoGen.Core/Orchestrator/IOrchestrator.cs deleted file mode 100644 index 7976d24cf7..0000000000 --- a/dotnet/src/AutoGen.Core/Orchestrator/IOrchestrator.cs +++ /dev/null @@ -1,34 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// IOrchestrator.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class OrchestrationContext -{ - public IEnumerable Candidates { get; set; } = Array.Empty(); - - public IEnumerable ChatHistory { get; set; } = Array.Empty(); -} - -public interface IOrchestrator -{ - /// - /// Return the next agent as the next speaker. return null if no agent is selected. - /// - /// orchestration context, such as candidate agents and chat history. - /// cancellation token - public Task GetNextSpeakerAsync( - OrchestrationContext context, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/AutoGen.Core/Orchestrator/RolePlayOrchestrator.cs b/dotnet/src/AutoGen.Core/Orchestrator/RolePlayOrchestrator.cs deleted file mode 100644 index 1fe1873212..0000000000 --- a/dotnet/src/AutoGen.Core/Orchestrator/RolePlayOrchestrator.cs +++ /dev/null @@ -1,122 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// RolePlayOrchestrator.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class RolePlayOrchestrator : IOrchestrator -{ - private readonly IAgent admin; - private readonly Graph? workflow = null; - public RolePlayOrchestrator(IAgent admin, Graph? 
workflow = null) - { - this.admin = admin; - this.workflow = workflow; - } - - public async Task GetNextSpeakerAsync( - OrchestrationContext context, - CancellationToken cancellationToken = default) - { - var candidates = context.Candidates.ToList(); - - if (candidates.Count == 0) - { - return null; - } - - if (candidates.Count == 1) - { - return candidates.First(); - } - - // if there's a workflow - // and the next available agent from the workflow is in the group chat - // then return the next agent from the workflow - if (this.workflow != null) - { - var lastMessage = context.ChatHistory.LastOrDefault(); - if (lastMessage == null) - { - return null; - } - var currentSpeaker = candidates.First(candidates => candidates.Name == lastMessage.From); - var nextAgents = await this.workflow.TransitToNextAvailableAgentsAsync(currentSpeaker, context.ChatHistory); - nextAgents = nextAgents.Where(nextAgent => candidates.Any(candidate => candidate.Name == nextAgent.Name)); - candidates = nextAgents.ToList(); - if (!candidates.Any()) - { - return null; - } - - if (candidates is { Count: 1 }) - { - return candidates.First(); - } - } - - // In this case, since there are more than one available agents from the workflow for the next speaker - // the admin will be invoked to decide the next speaker - var agentNames = candidates.Select(candidate => candidate.Name); - var rolePlayMessage = new TextMessage(Role.User, - content: $@"You are in a role play game. Carefully read the conversation history and carry on the conversation. -The available roles are: -{string.Join(",", agentNames)} - -Each message will start with 'From name:', e.g: -From {agentNames.First()}: -//your message//."); - - var chatHistoryWithName = this.ProcessConversationsForRolePlay(context.ChatHistory); - var messages = new IMessage[] { rolePlayMessage }.Concat(chatHistoryWithName); - - var response = await this.admin.GenerateReplyAsync( - messages: messages, - options: new GenerateReplyOptions - { - Temperature = 0, - MaxToken = 128, - StopSequence = [":"], - Functions = null, - }, - cancellationToken: cancellationToken); - - var name = response.GetContent() ?? throw new Exception("No name is returned."); - - // remove From - name = name!.Substring(5); - var candidate = candidates.FirstOrDefault(x => x.Name!.ToLower() == name.ToLower()); - - if (candidate != null) - { - return candidate; - } - - var errorMessage = $"The response from admin is {name}, which is either not in the candidates list or not in the correct format."; - throw new Exception(errorMessage); - } - - private IEnumerable ProcessConversationsForRolePlay(IEnumerable messages) - { - return messages.Select((x, i) => - { - var msg = @$"From {x.From}: -{x.GetContent()} - -round # {i}"; - - return new TextMessage(Role.User, content: msg); - }); - } -} diff --git a/dotnet/src/AutoGen.Core/Orchestrator/RoundRobinOrchestrator.cs b/dotnet/src/AutoGen.Core/Orchestrator/RoundRobinOrchestrator.cs deleted file mode 100644 index 11fa068ac0..0000000000 --- a/dotnet/src/AutoGen.Core/Orchestrator/RoundRobinOrchestrator.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
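// Illustrative sketch: selecting the next speaker with RolePlayOrchestrator above.
// The admin, alice and bob agents and the chatHistory variable are hypothetical, and the
// Task<IAgent?> return type of GetNextSpeakerAsync is assumed (generics are stripped here).
var orchestrator = new RolePlayOrchestrator(admin);

var context = new OrchestrationContext
{
    Candidates = new IAgent[] { alice, bob },
    ChatHistory = chatHistory,
};

// Returns one of the candidates (chosen by the admin agent), or null if none can be selected.
var nextSpeaker = await orchestrator.GetNextSpeakerAsync(context);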
-// RoundRobinOrchestrator.cs - -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -/// -/// Return the next agent in a round-robin fashion. -/// -/// If the last message is from one of the candidates, the next agent will be the next candidate in the list. -/// -/// -/// Otherwise, the first agent in will be returned. -/// -/// -/// -/// -public class RoundRobinOrchestrator : IOrchestrator -{ - public async Task GetNextSpeakerAsync( - OrchestrationContext context, - CancellationToken cancellationToken = default) - { - var lastMessage = context.ChatHistory.LastOrDefault(); - - if (lastMessage == null) - { - return context.Candidates.FirstOrDefault(); - } - - var candidates = context.Candidates.ToList(); - var lastAgentIndex = candidates.FindIndex(a => a.Name == lastMessage.From); - if (lastAgentIndex == -1) - { - return null; - } - - var nextAgentIndex = (lastAgentIndex + 1) % candidates.Count; - return candidates[nextAgentIndex]; - } -} diff --git a/dotnet/src/AutoGen.Core/Orchestrator/WorkflowOrchestrator.cs b/dotnet/src/AutoGen.Core/Orchestrator/WorkflowOrchestrator.cs deleted file mode 100644 index 651dca0d0c..0000000000 --- a/dotnet/src/AutoGen.Core/Orchestrator/WorkflowOrchestrator.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// WorkflowOrchestrator.cs - -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Core; - -public class WorkflowOrchestrator : IOrchestrator -{ - private readonly Graph workflow; - - public WorkflowOrchestrator(Graph workflow) - { - this.workflow = workflow; - } - - public async Task GetNextSpeakerAsync( - OrchestrationContext context, - CancellationToken cancellationToken = default) - { - var lastMessage = context.ChatHistory.LastOrDefault(); - if (lastMessage == null) - { - return null; - } - - var candidates = context.Candidates.ToList(); - var currentSpeaker = candidates.FirstOrDefault(candidates => candidates.Name == lastMessage.From); - - if (currentSpeaker == null) - { - return null; - } - var nextAgents = await this.workflow.TransitToNextAvailableAgentsAsync(currentSpeaker, context.ChatHistory); - nextAgents = nextAgents.Where(nextAgent => candidates.Any(candidate => candidate.Name == nextAgent.Name)); - candidates = nextAgents.ToList(); - if (!candidates.Any()) - { - return null; - } - - if (candidates is { Count: 1 }) - { - return candidates.First(); - } - else - { - throw new System.Exception("There are more than one available agents from the workflow for the next speaker."); - } - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/AutoGen.DotnetInteractive.csproj b/dotnet/src/AutoGen.DotnetInteractive/AutoGen.DotnetInteractive.csproj deleted file mode 100644 index e850d94944..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/AutoGen.DotnetInteractive.csproj +++ /dev/null @@ -1,40 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - enable - enable - AutoGen.DotnetInteractive - true - - - - - - - AutoGen.DotnetInteractive - - Dotnet interactive integration for AutoGen agents - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveFunction.cs b/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveFunction.cs deleted file mode 100644 index 4be3d21085..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveFunction.cs +++ /dev/null @@ -1,186 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DotnetInteractiveFunction.cs - -using System.Text; -using Microsoft.DotNet.Interactive.Documents; -using Microsoft.DotNet.Interactive.Documents.Jupyter; - -namespace AutoGen.DotnetInteractive; - -public partial class DotnetInteractiveFunction : IDisposable -{ - private readonly InteractiveService? _interactiveService = null; - private string _notebookPath; - private readonly KernelInfoCollection _kernelInfoCollection = new KernelInfoCollection(); - - /// - /// Create an instance of " - /// - /// interactive service to use. - /// notebook path if provided. - public DotnetInteractiveFunction(InteractiveService interactiveService, string? notebookPath = null, bool continueFromExistingNotebook = false) - { - this._interactiveService = interactiveService; - this._notebookPath = notebookPath ?? Path.GetTempPath() + "notebook.ipynb"; - this._kernelInfoCollection.Add(new KernelInfo("csharp")); - this._kernelInfoCollection.Add(new KernelInfo("markdown")); - if (continueFromExistingNotebook == false) - { - // remove existing notebook - if (File.Exists(this._notebookPath)) - { - File.Delete(this._notebookPath); - } - - var document = new InteractiveDocument(); - - using var stream = File.OpenWrite(_notebookPath); - Notebook.Write(document, stream, this._kernelInfoCollection); - stream.Flush(); - stream.Dispose(); - } - else if (continueFromExistingNotebook == true && File.Exists(this._notebookPath)) - { - // load existing notebook - using var readStream = File.OpenRead(this._notebookPath); - var document = Notebook.Read(readStream, this._kernelInfoCollection); - foreach (var cell in document.Elements) - { - if (cell.KernelName == "csharp") - { - var code = cell.Contents; - this._interactiveService.SubmitCSharpCodeAsync(code, default).Wait(); - } - } - } - else - { - // create an empty notebook - var document = new InteractiveDocument(); - - using var stream = File.OpenWrite(_notebookPath); - Notebook.Write(document, stream, this._kernelInfoCollection); - stream.Flush(); - stream.Dispose(); - } - } - - /// - /// Run existing dotnet code from message. Don't modify the code, run it as is. - /// - /// code. - [Function] - public async Task RunCode(string code) - { - if (this._interactiveService == null) - { - throw new Exception("InteractiveService is not initialized."); - } - - var result = await this._interactiveService.SubmitCSharpCodeAsync(code, default); - if (result != null) - { - // if result contains Error, return entire message - if (result.StartsWith("Error:")) - { - return result; - } - - // add cell if _notebookPath is not null - if (this._notebookPath != null) - { - await AddCellAsync(code, "csharp"); - } - - // if result is over 100 characters, only return the first 100 characters. 
- if (result.Length > 100) - { - result = result.Substring(0, 100) + " (...too long to present)"; - - return result; - } - - return result; - } - - // add cell if _notebookPath is not null - if (this._notebookPath != null) - { - await AddCellAsync(code, "csharp"); - } - - return "Code run successfully. no output is available."; - } - - /// - /// Install nuget packages. - /// - /// nuget package to install. - [Function] - public async Task InstallNugetPackages(string[] nugetPackages) - { - if (this._interactiveService == null) - { - throw new Exception("InteractiveService is not initialized."); - } - - var codeSB = new StringBuilder(); - foreach (var nuget in nugetPackages ?? Array.Empty()) - { - var nugetInstallCommand = $"#r \"nuget:{nuget}\""; - codeSB.AppendLine(nugetInstallCommand); - await this._interactiveService.SubmitCSharpCodeAsync(nugetInstallCommand, default); - } - - var code = codeSB.ToString(); - if (this._notebookPath != null) - { - await AddCellAsync(code, "csharp"); - } - - var sb = new StringBuilder(); - sb.AppendLine("Installed nuget packages:"); - foreach (var nuget in nugetPackages ?? Array.Empty()) - { - sb.AppendLine($"- {nuget}"); - } - - return sb.ToString(); - } - - private async Task AddCellAsync(string cellContent, string kernelName) - { - if (!File.Exists(this._notebookPath)) - { - using var stream = File.OpenWrite(this._notebookPath); - Notebook.Write(new InteractiveDocument(), stream, this._kernelInfoCollection); - stream.Dispose(); - } - - using var readStream = File.OpenRead(this._notebookPath); - var document = Notebook.Read(readStream, this._kernelInfoCollection); - readStream.Dispose(); - - var cell = new InteractiveDocumentElement(cellContent, kernelName); - - document.Add(cell); - - using var writeStream = File.OpenWrite(this._notebookPath); - Notebook.Write(document, writeStream, this._kernelInfoCollection); - // sleep 3 seconds - await Task.Delay(3000); - writeStream.Flush(); - writeStream.Dispose(); - } - - public void Dispose() - { - this._interactiveService?.Dispose(); - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveKernelBuilder.cs b/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveKernelBuilder.cs deleted file mode 100644 index 7f22634699..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveKernelBuilder.cs +++ /dev/null @@ -1,34 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
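// Illustrative sketch: executing C# through DotnetInteractiveFunction above. The
// interactiveService instance is assumed to be an already-started InteractiveService,
// the notebook path and package name are placeholders, and the Task<string> return type
// of RunCode is assumed (generics are stripped in this diff).
var dotnetFunction = new DotnetInteractiveFunction(interactiveService, notebookPath: "./notebook.ipynb");

await dotnetFunction.InstallNugetPackages(new[] { "Newtonsoft.Json" });
var output = await dotnetFunction.RunCode("Console.WriteLine(1 + 1);");
Console.WriteLine(output);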
-// DotnetInteractiveKernelBuilder.cs - -namespace AutoGen.DotnetInteractive; - -public static class DotnetInteractiveKernelBuilder -{ - -#if NET8_0_OR_GREATER - public static InProccessDotnetInteractiveKernelBuilder CreateEmptyInProcessKernelBuilder() - { - return new InProccessDotnetInteractiveKernelBuilder(); - } - - - public static InProccessDotnetInteractiveKernelBuilder CreateDefaultInProcessKernelBuilder() - { - return new InProccessDotnetInteractiveKernelBuilder() - .AddCSharpKernel() - .AddFSharpKernel(); - } -#endif - - public static DotnetInteractiveStdioKernelConnector CreateKernelBuilder(string workingDirectory, string kernelName = "root-proxy") - { - return new DotnetInteractiveStdioKernelConnector(workingDirectory, kernelName); - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveStdioKernelConnector.cs b/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveStdioKernelConnector.cs deleted file mode 100644 index f4776f72a4..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveStdioKernelConnector.cs +++ /dev/null @@ -1,92 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DotnetInteractiveStdioKernelConnector.cs - -using AutoGen.DotnetInteractive.Extension; -using Microsoft.DotNet.Interactive; -using Microsoft.DotNet.Interactive.Commands; -using Microsoft.DotNet.Interactive.Connection; - -namespace AutoGen.DotnetInteractive; - -public class DotnetInteractiveStdioKernelConnector -{ - private string workingDirectory; - private InteractiveService interactiveService; - private string kernelName; - private List setupCommands = new List(); - - internal DotnetInteractiveStdioKernelConnector(string workingDirectory, string kernelName = "root-proxy") - { - this.workingDirectory = workingDirectory; - this.interactiveService = new InteractiveService(workingDirectory); - this.kernelName = kernelName; - } - - public DotnetInteractiveStdioKernelConnector RestoreDotnetInteractive() - { - if (this.interactiveService.RestoreDotnetInteractive()) - { - return this; - } - else - { - throw new Exception("Failed to restore dotnet interactive tool."); - } - } - - public DotnetInteractiveStdioKernelConnector AddPythonKernel( - string venv, - string kernelName = "python") - { - var magicCommand = $"#!connect jupyter --kernel-name {kernelName} --kernel-spec {venv}"; - var connectCommand = new SubmitCode(magicCommand); - - this.setupCommands.Add(connectCommand); - - return this; - } - - public async Task BuildAsync(CancellationToken ct = default) - { - var compositeKernel = new CompositeKernel(); - var url = KernelHost.CreateHostUri(this.kernelName); - var cmd = new string[] - { - "dotnet", - "tool", - "run", - "dotnet-interactive", - $"[cb-{this.kernelName}]", - "stdio", - //"--default-kernel", - //"csharp", - "--working-dir", - $@"""{workingDirectory}""", - }; - - var connector = new StdIoKernelConnector( - cmd, - this.kernelName, - url, - new DirectoryInfo(this.workingDirectory)); - - var rootProxyKernel = await connector.CreateRootProxyKernelAsync(); - - rootProxyKernel.KernelInfo.SupportedKernelCommands.Add(new(nameof(SubmitCode))); - - var dotnetKernel = await 
connector.CreateProxyKernelAsync(".NET"); - foreach (var setupCommand in this.setupCommands) - { - var setupCommandResult = await rootProxyKernel.SendAsync(setupCommand, ct); - setupCommandResult.ThrowOnCommandFailed(); - } - - return rootProxyKernel; - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/Extension/AgentExtension.cs b/dotnet/src/AutoGen.DotnetInteractive/Extension/AgentExtension.cs deleted file mode 100644 index 7999bad9c2..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/Extension/AgentExtension.cs +++ /dev/null @@ -1,90 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AgentExtension.cs - -using System.Text; -namespace AutoGen.DotnetInteractive; - -public static class AgentExtension -{ - /// - /// Register an AutoReply hook to run dotnet code block from message. - /// This hook will first detect if there's any dotnet code block (e.g. ```csharp and ```) in the most recent message. - /// if there's any, it will run the code block and send the result back as reply. - /// - /// agent - /// interactive service - /// code block prefix - /// code block suffix - /// maximum output to keep - /// - /// - /// - [Obsolete] - public static IAgent RegisterDotnetCodeBlockExectionHook( - this IAgent agent, - InteractiveService interactiveService, - string codeBlockPrefix = "```csharp", - string codeBlockSuffix = "```", - int maximumOutputToKeep = 500) - { - return agent.RegisterMiddleware(async (msgs, option, innerAgent, ct) => - { - var lastMessage = msgs.LastOrDefault(); - if (lastMessage == null || lastMessage.GetContent() is null) - { - return await innerAgent.GenerateReplyAsync(msgs, option, ct); - } - - // retrieve all code blocks from last message - var codeBlocks = lastMessage.GetContent()!.Split(new[] { codeBlockPrefix }, StringSplitOptions.RemoveEmptyEntries); - if (codeBlocks.Length <= 0) - { - return await innerAgent.GenerateReplyAsync(msgs, option, ct); - } - - // run code blocks - var result = new StringBuilder(); - var i = 0; - result.AppendLine(@$"// [DOTNET_CODE_BLOCK_EXECUTION]"); - foreach (var codeBlock in codeBlocks) - { - var codeBlockIndex = codeBlock.IndexOf(codeBlockSuffix); - - if (codeBlockIndex == -1) - { - continue; - } - - // remove code block suffix - var code = codeBlock.Substring(0, codeBlockIndex).Trim(); - - if (code.Length == 0) - { - continue; - } - - var codeResult = await interactiveService.SubmitCSharpCodeAsync(code, ct); - if (codeResult != null) - { - result.AppendLine(@$"### Executing result for code block {i++}"); - result.AppendLine(codeResult); - result.AppendLine("### End of executing result ###"); - } - } - if (result.Length <= maximumOutputToKeep) - { - maximumOutputToKeep = result.Length; - } - - return new TextMessage(Role.Assistant, result.ToString().Substring(0, maximumOutputToKeep), from: agent.Name); - }); - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/Extension/KernelExtension.cs b/dotnet/src/AutoGen.DotnetInteractive/Extension/KernelExtension.cs deleted file mode 100644 index 43d9dee7d6..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/Extension/KernelExtension.cs +++ /dev/null @@ -1,87 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 
2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// KernelExtension.cs - -using Microsoft.DotNet.Interactive; -using Microsoft.DotNet.Interactive.Commands; -using Microsoft.DotNet.Interactive.Connection; -using Microsoft.DotNet.Interactive.Events; - -namespace AutoGen.DotnetInteractive.Extension; - -public static class KernelExtension -{ - public static async Task RunSubmitCodeCommandAsync( - this Kernel kernel, - string codeBlock, - string targetKernelName, - CancellationToken ct = default) - { - try - { - var cmd = new SubmitCode(codeBlock, targetKernelName); - var res = await kernel.SendAndThrowOnCommandFailedAsync(cmd, ct); - var events = res.Events; - var displayValues = res.Events.Where(x => x is StandardErrorValueProduced || x is StandardOutputValueProduced || x is ReturnValueProduced || x is DisplayedValueProduced) - .SelectMany(x => (x as DisplayEvent)!.FormattedValues); - - if (displayValues is null || displayValues.Count() == 0) - { - return null; - } - - return string.Join("\n", displayValues.Select(x => x.Value)); - } - catch (Exception ex) - { - return $"Error: {ex.Message}"; - } - } - - internal static void SetUpValueSharingIfSupported(this ProxyKernel proxyKernel) - { - var supportedCommands = proxyKernel.KernelInfo.SupportedKernelCommands; - if (supportedCommands.Any(d => d.Name == nameof(RequestValue)) && - supportedCommands.Any(d => d.Name == nameof(SendValue))) - { - proxyKernel.UseValueSharing(); - } - } - - internal static async Task SendAndThrowOnCommandFailedAsync( - this Kernel kernel, - KernelCommand command, - CancellationToken cancellationToken) - { - var result = await kernel.SendAsync(command, cancellationToken); - result.ThrowOnCommandFailed(); - return result; - } - - internal static void ThrowOnCommandFailed(this KernelCommandResult result) - { - var failedEvents = result.Events.OfType(); - if (!failedEvents.Any()) - { - return; - } - - if (failedEvents.Skip(1).Any()) - { - var innerExceptions = failedEvents.Select(f => f.GetException()); - throw new AggregateException(innerExceptions); - } - else - { - throw failedEvents.Single().GetException(); - } - } - - private static Exception GetException(this CommandFailed commandFailedEvent) - => new Exception(commandFailedEvent.Message); -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/Extension/MessageExtension.cs b/dotnet/src/AutoGen.DotnetInteractive/Extension/MessageExtension.cs deleted file mode 100644 index 7a62fb1a80..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/Extension/MessageExtension.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
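// Illustrative sketch of RunSubmitCodeCommandAsync from the kernel extension above.
// The kernel instance is a hypothetical, already-built Microsoft.DotNet.Interactive kernel.
var result = await kernel.RunSubmitCodeCommandAsync(
    codeBlock: "Console.WriteLine(\"hello\");",
    targetKernelName: "csharp");

// Returns the formatted display/return values, "Error: ..." on failure, or null when the
// submission produced no output.
Console.WriteLine(result ?? "<no output>");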
-// MessageExtension.cs - -using System.Text.RegularExpressions; - -namespace AutoGen.DotnetInteractive.Extension; - -public static class MessageExtension -{ - /// - /// Extract a single code block from a message. If the message contains multiple code blocks, only the first one will be returned. - /// - /// - /// code block prefix, e.g. ```csharp - /// code block suffix, e.g. ``` - /// - public static string? ExtractCodeBlock( - this IMessage message, - string codeBlockPrefix, - string codeBlockSuffix) - { - foreach (var codeBlock in message.ExtractCodeBlocks(codeBlockPrefix, codeBlockSuffix)) - { - return codeBlock; - } - - return null; - } - - /// - /// Extract all code blocks from a message. - /// - /// - /// code block prefix, e.g. ```csharp - /// code block suffix, e.g. ``` - /// - public static IEnumerable ExtractCodeBlocks( - this IMessage message, - string codeBlockPrefix, - string codeBlockSuffix) - { - var content = message.GetContent() ?? string.Empty; - if (string.IsNullOrWhiteSpace(content)) - { - yield break; - } - - foreach (Match match in Regex.Matches(content, $@"{codeBlockPrefix}([\s\S]*?){codeBlockSuffix}")) - { - yield return match.Groups[1].Value.Trim(); - } - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/GlobalUsing.cs b/dotnet/src/AutoGen.DotnetInteractive/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/src/AutoGen.DotnetInteractive/InProccessDotnetInteractiveKernelBuilder.cs b/dotnet/src/AutoGen.DotnetInteractive/InProccessDotnetInteractiveKernelBuilder.cs deleted file mode 100644 index 971c96d3fc..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/InProccessDotnetInteractiveKernelBuilder.cs +++ /dev/null @@ -1,116 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// InProccessDotnetInteractiveKernelBuilder.cs - -#if NET8_0_OR_GREATER -using AutoGen.DotnetInteractive.Extension; -using Microsoft.DotNet.Interactive; -using Microsoft.DotNet.Interactive.Commands; -using Microsoft.DotNet.Interactive.CSharp; -using Microsoft.DotNet.Interactive.FSharp; -using Microsoft.DotNet.Interactive.Jupyter; -using Microsoft.DotNet.Interactive.PackageManagement; -using Microsoft.DotNet.Interactive.PowerShell; - -namespace AutoGen.DotnetInteractive; - -/// -/// Build an in-proc dotnet interactive kernel. 
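// Illustrative sketch of ExtractCodeBlock from the message extension above. The reply
// message and interactiveService variables are hypothetical; only the first ```csharp
// fenced block in the reply is returned.
var code = reply.ExtractCodeBlock(codeBlockPrefix: "```csharp", codeBlockSuffix: "```");
if (code is not null)
{
    var executionResult = await interactiveService.SubmitCSharpCodeAsync(code, CancellationToken.None);
    Console.WriteLine(executionResult);
}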
-/// -public class InProccessDotnetInteractiveKernelBuilder -{ - private readonly CompositeKernel compositeKernel; - - internal InProccessDotnetInteractiveKernelBuilder() - { - this.compositeKernel = new CompositeKernel(); - - // add jupyter connector - this.compositeKernel.AddKernelConnector( - new ConnectJupyterKernelCommand() - .AddConnectionOptions(new JupyterHttpKernelConnectionOptions()) - .AddConnectionOptions(new JupyterLocalKernelConnectionOptions())); - } - - public InProccessDotnetInteractiveKernelBuilder AddCSharpKernel(IEnumerable? aliases = null) - { - aliases ??= ["c#", "C#", "csharp"]; - // create csharp kernel - var csharpKernel = new CSharpKernel() - .UseNugetDirective((k, resolvedPackageReference) => - { - - k.AddAssemblyReferences(resolvedPackageReference - .SelectMany(r => r.AssemblyPaths)); - return Task.CompletedTask; - }) - .UseKernelHelpers() - .UseWho() - .UseMathAndLaTeX() - .UseValueSharing(); - - this.AddKernel(csharpKernel, aliases); - - return this; - } - - public InProccessDotnetInteractiveKernelBuilder AddFSharpKernel(IEnumerable? aliases = null) - { - aliases ??= ["f#", "F#", "fsharp"]; - // create fsharp kernel - var fsharpKernel = new FSharpKernel() - .UseDefaultFormatting() - .UseKernelHelpers() - .UseWho() - .UseMathAndLaTeX() - .UseValueSharing(); - - this.AddKernel(fsharpKernel, aliases); - - return this; - } - - public InProccessDotnetInteractiveKernelBuilder AddPowershellKernel(IEnumerable? aliases = null) - { - aliases ??= ["pwsh", "powershell"]; - // create powershell kernel - var powershellKernel = new PowerShellKernel() - .UseProfiles() - .UseValueSharing(); - - this.AddKernel(powershellKernel, aliases); - - return this; - } - - public InProccessDotnetInteractiveKernelBuilder AddPythonKernel(string venv, string kernelName = "python") - { - // create python kernel - var magicCommand = $"#!connect jupyter --kernel-name {kernelName} --kernel-spec {venv}"; - var connectCommand = new SubmitCode(magicCommand); - var result = this.compositeKernel.SendAsync(connectCommand).Result; - - result.ThrowOnCommandFailed(); - - return this; - } - - public CompositeKernel Build() - { - return this.compositeKernel - .UseDefaultMagicCommands() - .UseImportMagicCommand(); - } - - private InProccessDotnetInteractiveKernelBuilder AddKernel(Kernel kernel, IEnumerable? aliases = null) - { - this.compositeKernel.Add(kernel, aliases); - return this; - } -} -#endif diff --git a/dotnet/src/AutoGen.DotnetInteractive/InteractiveService.cs b/dotnet/src/AutoGen.DotnetInteractive/InteractiveService.cs deleted file mode 100644 index f729684276..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/InteractiveService.cs +++ /dev/null @@ -1,241 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
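// Illustrative sketch: composing an in-process kernel (net8.0+ only, per the #if guard above)
// with this builder and running code through the kernel extension shown earlier in this diff.
var kernel = DotnetInteractiveKernelBuilder
    .CreateDefaultInProcessKernelBuilder()   // adds the C# and F# kernels
    .AddPowershellKernel()
    .Build();

var output = await kernel.RunSubmitCodeCommandAsync("1 + 2", "csharp");
Console.WriteLine(output);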
-// InteractiveService.cs - -using System.Diagnostics; -using System.Reactive.Linq; -using System.Reflection; -using AutoGen.DotnetInteractive.Extension; -using Microsoft.DotNet.Interactive; -using Microsoft.DotNet.Interactive.Commands; -using Microsoft.DotNet.Interactive.Connection; -using Microsoft.DotNet.Interactive.Events; -using Microsoft.DotNet.Interactive.Utility; - -namespace AutoGen.DotnetInteractive; - -public class InteractiveService : IDisposable -{ - private Kernel? kernel = null; - private Process? process = null; - private bool disposedValue; - private const string DotnetInteractiveToolNotInstallMessage = "Cannot find a tool in the manifest file that has a command named 'dotnet-interactive'."; - //private readonly ProcessJobTracker jobTracker = new ProcessJobTracker(); - private string? installingDirectory; - - /// - /// Install dotnet interactive tool to - /// and create an instance of . - /// - /// When using this constructor, you need to call to install dotnet interactive tool - /// and start the kernel. - /// - /// dotnet interactive installing directory - public InteractiveService(string installingDirectory) - { - this.installingDirectory = installingDirectory; - } - - /// - /// Create an instance of with a running kernel. - /// When using this constructor, you don't need to call to start the kernel. - /// - /// - public InteractiveService(Kernel kernel) - { - this.kernel = kernel; - } - - public Kernel? Kernel => this.kernel; - - public async Task StartAsync(string workingDirectory, CancellationToken ct = default) - { - if (this.kernel != null) - { - return true; - } - - this.kernel = await this.CreateKernelAsync(workingDirectory, true, ct); - return true; - } - - public async Task SubmitCommandAsync(SubmitCode cmd, CancellationToken ct) - { - if (this.kernel == null) - { - throw new Exception("Kernel is not running"); - } - - return await this.kernel.RunSubmitCodeCommandAsync(cmd.Code, cmd.TargetKernelName, ct); - } - - public async Task SubmitPowershellCodeAsync(string code, CancellationToken ct) - { - var command = new SubmitCode(code, targetKernelName: "pwsh"); - return await this.SubmitCommandAsync(command, ct); - } - - public async Task SubmitCSharpCodeAsync(string code, CancellationToken ct) - { - var command = new SubmitCode(code, targetKernelName: "csharp"); - return await this.SubmitCommandAsync(command, ct); - } - - public bool RestoreDotnetInteractive() - { - if (this.installingDirectory is null) - { - throw new Exception("Installing directory is not set"); - } - - // write RestoreInteractive.config from embedded resource to this.workingDirectory - var assembly = Assembly.GetAssembly(typeof(InteractiveService))!; - var resourceName = "AutoGen.DotnetInteractive.RestoreInteractive.config"; - using (var stream = assembly.GetManifestResourceStream(resourceName)!) - using (var fileStream = File.Create(Path.Combine(this.installingDirectory, "RestoreInteractive.config"))) - { - stream.CopyTo(fileStream); - } - - // write dotnet-tool.json from embedded resource to this.workingDirectory - - resourceName = "AutoGen.DotnetInteractive.dotnet-tools.json"; - using (var stream2 = assembly.GetManifestResourceStream(resourceName)!) 
- using (var fileStream2 = File.Create(Path.Combine(this.installingDirectory, "dotnet-tools.json"))) - { - stream2.CopyTo(fileStream2); - } - - var psi = new ProcessStartInfo - { - FileName = "dotnet", - Arguments = $"tool restore --configfile RestoreInteractive.config", - WorkingDirectory = this.installingDirectory, - RedirectStandardInput = true, - RedirectStandardOutput = true, - RedirectStandardError = true, - UseShellExecute = false, - CreateNoWindow = true, - }; - - using var process = new Process { StartInfo = psi }; - process.OutputDataReceived += this.PrintProcessOutput; - process.ErrorDataReceived += this.PrintProcessOutput; - process.Start(); - process.BeginErrorReadLine(); - process.BeginOutputReadLine(); - process.WaitForExit(); - - return process.ExitCode == 0; - } - - private async Task CreateKernelAsync(string workingDirectory, bool restoreWhenFail = true, CancellationToken ct = default) - { - try - { - var url = KernelHost.CreateHostUriForCurrentProcessId(); - var compositeKernel = new CompositeKernel("cbcomposite"); - var cmd = new string[] - { - "dotnet", - "tool", - "run", - "dotnet-interactive", - $"[cb-{Process.GetCurrentProcess().Id}]", - "stdio", - //"--default-kernel", - //"csharp", - "--working-dir", - $@"""{workingDirectory}""", - }; - var connector = new StdIoKernelConnector( - cmd, - "root-proxy", - url, - new DirectoryInfo(workingDirectory)); - - // Start the dotnet-interactive tool and get a proxy for the root composite kernel therein. - using var rootProxyKernel = await connector.CreateRootProxyKernelAsync().ConfigureAwait(false); - - // Get proxies for each subkernel present inside the dotnet-interactive tool. - var requestKernelInfoCommand = new RequestKernelInfo(rootProxyKernel.KernelInfo.RemoteUri); - var result = - await rootProxyKernel.SendAsync( - requestKernelInfoCommand, - ct).ConfigureAwait(false); - - var subKernels = result.Events.OfType(); - - foreach (var kernelInfoProduced in result.Events.OfType()) - { - var kernelInfo = kernelInfoProduced.KernelInfo; - if (kernelInfo is not null && !kernelInfo.IsProxy && !kernelInfo.IsComposite) - { - var proxyKernel = await connector.CreateProxyKernelAsync(kernelInfo).ConfigureAwait(false); - proxyKernel.SetUpValueSharingIfSupported(); - compositeKernel.Add(proxyKernel); - } - } - - //compositeKernel.DefaultKernelName = "csharp"; - compositeKernel.Add(rootProxyKernel); - - return compositeKernel; - } - catch (CommandLineInvocationException) when (restoreWhenFail) - { - var success = this.RestoreDotnetInteractive(); - - if (success) - { - return await this.CreateKernelAsync(workingDirectory, false, ct); - } - - throw; - } - } - - private void PrintProcessOutput(object sender, DataReceivedEventArgs e) - { - if (!string.IsNullOrEmpty(e.Data)) - { - Console.WriteLine(e.Data); - } - } - - public bool IsRunning() - { - return this.kernel != null; - } - - protected virtual void Dispose(bool disposing) - { - if (!disposedValue) - { - if (disposing) - { - this.kernel?.Dispose(); - - if (this.process != null) - { - this.process.Kill(); - this.process.Dispose(); - } - } - - disposedValue = true; - } - } - - public void Dispose() - { - // Do not change this code. 
Put cleanup code in 'Dispose(bool disposing)' method - Dispose(disposing: true); - GC.SuppressFinalize(this); - } -} diff --git a/dotnet/src/AutoGen.DotnetInteractive/RestoreInteractive.config b/dotnet/src/AutoGen.DotnetInteractive/RestoreInteractive.config deleted file mode 100644 index 390adb4ab6..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/RestoreInteractive.config +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/AutoGen.DotnetInteractive/dotnet-tools.json b/dotnet/src/AutoGen.DotnetInteractive/dotnet-tools.json deleted file mode 100644 index 12b09e61ca..0000000000 --- a/dotnet/src/AutoGen.DotnetInteractive/dotnet-tools.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1, - "isRoot": true, - "tools": { - "Microsoft.dotnet-interactive": { - "version": "1.0.522904", - "commands": [ - "dotnet-interactive" - ] - } - } -} \ No newline at end of file diff --git a/dotnet/src/AutoGen.Gemini/AutoGen.Gemini.csproj b/dotnet/src/AutoGen.Gemini/AutoGen.Gemini.csproj deleted file mode 100644 index 9a60596503..0000000000 --- a/dotnet/src/AutoGen.Gemini/AutoGen.Gemini.csproj +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - - - - - - - AutoGen.Gemini - - This package provides the intergration with Gemini. - - - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen.Gemini/Extension/FunctionContractExtension.cs b/dotnet/src/AutoGen.Gemini/Extension/FunctionContractExtension.cs deleted file mode 100644 index aa951bcb78..0000000000 --- a/dotnet/src/AutoGen.Gemini/Extension/FunctionContractExtension.cs +++ /dev/null @@ -1,96 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionContractExtension.cs - -using System.Collections.Generic; -using System.Linq; -using AutoGen.Core; -using Google.Cloud.AIPlatform.V1; -using Json.Schema; -using Json.Schema.Generation; -using OpenAPISchemaType = Google.Cloud.AIPlatform.V1.Type; -using Type = System.Type; - -namespace AutoGen.Gemini.Extension; - -public static class FunctionContractExtension -{ - /// - /// Convert a to a that can be used in gpt funciton call. - /// - public static FunctionDeclaration ToFunctionDeclaration(this FunctionContract function) - { - var required = function.Parameters!.Where(p => p.IsRequired) - .Select(p => p.Name) - .ToList(); - var parameterProperties = new Dictionary(); - - foreach (var parameter in function.Parameters ?? Enumerable.Empty()) - { - var schema = ToOpenApiSchema(parameter.ParameterType); - schema.Description = parameter.Description; - schema.Title = parameter.Name; - schema.Nullable = !parameter.IsRequired; - parameterProperties.Add(parameter.Name!, schema); - } - - return new FunctionDeclaration - { - Name = function.Name, - Description = function.Description, - Parameters = new OpenApiSchema - { - Required = - { - required, - }, - Properties = - { - parameterProperties, - }, - Type = OpenAPISchemaType.Object, - }, - }; - } - - private static OpenApiSchema ToOpenApiSchema(Type? 
type) - { - if (type == null) - { - return new OpenApiSchema - { - Type = OpenAPISchemaType.Unspecified - }; - } - - var schema = new JsonSchemaBuilder().FromType(type).Build(); - var openApiSchema = new OpenApiSchema - { - Type = schema.GetJsonType() switch - { - SchemaValueType.Array => OpenAPISchemaType.Array, - SchemaValueType.Boolean => OpenAPISchemaType.Boolean, - SchemaValueType.Integer => OpenAPISchemaType.Integer, - SchemaValueType.Number => OpenAPISchemaType.Number, - SchemaValueType.Object => OpenAPISchemaType.Object, - SchemaValueType.String => OpenAPISchemaType.String, - _ => OpenAPISchemaType.Unspecified - }, - }; - - if (schema.GetJsonType() == SchemaValueType.Object && schema.GetProperties() is var properties && properties != null) - { - foreach (var property in properties) - { - openApiSchema.Properties.Add(property.Key, ToOpenApiSchema(property.Value.GetType())); - } - } - - return openApiSchema; - } -} diff --git a/dotnet/src/AutoGen.Gemini/GeminiChatAgent.cs b/dotnet/src/AutoGen.Gemini/GeminiChatAgent.cs deleted file mode 100644 index 3a8f5b0598..0000000000 --- a/dotnet/src/AutoGen.Gemini/GeminiChatAgent.cs +++ /dev/null @@ -1,274 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GeminiChatAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; -using AutoGen.Gemini.Extension; -using Google.Cloud.AIPlatform.V1; -using Google.Protobuf.Collections; -namespace AutoGen.Gemini; - -public class GeminiChatAgent : IStreamingAgent -{ - private readonly IGeminiClient client; - private readonly string? systemMessage; - private readonly string model; - private readonly ToolConfig? toolConfig; - private readonly RepeatedField? safetySettings; - private readonly string responseMimeType; - private readonly Tool[]? tools; - - /// - /// Create that connects to Gemini. - /// - /// the gemini client to use. e.g. - /// agent name - /// the model id. It needs to be in the format of - /// 'projects/{project}/locations/{location}/publishers/{provider}/models/{model}' if the is - /// system message - /// tool config - /// tools - /// safety settings - /// response mime type, available values are ['application/json', 'text/plain'], default is 'text/plain' - public GeminiChatAgent( - IGeminiClient client, - string name, - string model, - string? systemMessage = null, - ToolConfig? toolConfig = null, - Tool[]? tools = null, - RepeatedField? safetySettings = null, - string responseMimeType = "text/plain") - { - this.client = client; - this.Name = name; - this.systemMessage = systemMessage; - this.model = model; - this.toolConfig = toolConfig; - this.safetySettings = safetySettings; - this.responseMimeType = responseMimeType; - this.tools = tools; - } - - /// - /// Create that connects to Gemini using - /// - /// agent name - /// the name of gemini model, e.g. 
gemini-1.5-flash-001 - /// google gemini api key - /// system message - /// tool config - /// tools - /// - /// response mime type, available values are ['application/json', 'text/plain'], default is 'text/plain' - /// /// - /// - /// - public GeminiChatAgent( - string name, - string model, - string apiKey, - string systemMessage = "You are a helpful AI assistant", - ToolConfig? toolConfig = null, - Tool[]? tools = null, - RepeatedField? safetySettings = null, - string responseMimeType = "text/plain") - : this( - client: new GoogleGeminiClient(apiKey), - name: name, - model: model, - systemMessage: systemMessage, - toolConfig: toolConfig, - tools: tools, - safetySettings: safetySettings, - responseMimeType: responseMimeType) - { - } - - /// - /// Create that connects to Vertex AI. - /// - /// agent name - /// system message - /// the name of gemini model, e.g. gemini-1.5-flash-001 - /// project id - /// model location - /// model provider, default is 'google' - /// tool config - /// tools - /// - /// response mime type, available values are ['application/json', 'text/plain'], default is 'text/plain' - /// - /// - /// - public GeminiChatAgent( - string name, - string model, - string project, - string location, - string provider = "google", - string? systemMessage = null, - ToolConfig? toolConfig = null, - Tool[]? tools = null, - RepeatedField? safetySettings = null, - string responseMimeType = "text/plain") - : this( - client: new VertexGeminiClient(location), - name: name, - model: $"projects/{project}/locations/{location}/publishers/{provider}/models/{model}", - systemMessage: systemMessage, - toolConfig: toolConfig, - tools: tools, - safetySettings: safetySettings, - responseMimeType: responseMimeType) - { - } - - public string Name { get; } - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - var request = BuildChatRequest(messages, options); - var response = await this.client.GenerateContentAsync(request, cancellationToken: cancellationToken).ConfigureAwait(false); - - return MessageEnvelope.Create(response, this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var request = BuildChatRequest(messages, options); - var response = this.client.GenerateContentStreamAsync(request); - - await foreach (var item in response.WithCancellation(cancellationToken).ConfigureAwait(false)) - { - yield return MessageEnvelope.Create(item, this.Name); - } - } - - private GenerateContentRequest BuildChatRequest(IEnumerable messages, GenerateReplyOptions? options) - { - var geminiMessages = messages.Select(m => m switch - { - IMessage contentMessage => contentMessage.Content, - _ => throw new NotSupportedException($"Message type {m.GetType()} is not supported.") - }); - - // there are several rules applies to the messages that can be sent to Gemini in a multi-turn chat - // - The first message must be from the user or function - // - The (user|model) roles must alternate e.g. (user, model, user, model, ...) 
- // - The last message must be from the user or function - - // check if the first message is from the user - if (geminiMessages.FirstOrDefault()?.Role != "user" && geminiMessages.FirstOrDefault()?.Role != "function") - { - throw new ArgumentException("The first message must be from the user or function", nameof(messages)); - } - - // check if the last message is from the user - if (geminiMessages.LastOrDefault()?.Role != "user" && geminiMessages.LastOrDefault()?.Role != "function") - { - throw new ArgumentException("The last message must be from the user or function", nameof(messages)); - } - - // merge continuous messages with the same role into one message - var mergedMessages = geminiMessages.Aggregate(new List(), (acc, message) => - { - if (acc.Count == 0 || acc.Last().Role != message.Role) - { - acc.Add(message); - } - else - { - acc.Last().Parts.AddRange(message.Parts); - } - - return acc; - }); - - var systemMessage = this.systemMessage switch - { - null => null, - string message => new Content - { - Parts = { new[] { new Part { Text = message } } }, - Role = "system_instruction" - } - }; - - List tools = this.tools?.ToList() ?? new List(); - - var request = new GenerateContentRequest() - { - Contents = { mergedMessages }, - SystemInstruction = systemMessage, - Model = this.model, - GenerationConfig = new GenerationConfig - { - StopSequences = { options?.StopSequence ?? Enumerable.Empty() }, - ResponseMimeType = this.responseMimeType, - CandidateCount = 1, - }, - }; - - if (this.toolConfig is not null) - { - request.ToolConfig = this.toolConfig; - } - - if (this.safetySettings is not null) - { - request.SafetySettings.Add(this.safetySettings); - } - - if (options?.MaxToken.HasValue is true) - { - request.GenerationConfig.MaxOutputTokens = options.MaxToken.Value; - } - - if (options?.Temperature.HasValue is true) - { - request.GenerationConfig.Temperature = options.Temperature.Value; - } - - if (options?.Functions is { Length: > 0 }) - { - foreach (var function in options.Functions) - { - tools.Add(new Tool - { - FunctionDeclarations = { function.ToFunctionDeclaration() }, - }); - } - } - - // merge tools into one tool - // because multipe tools are currently not supported by Gemini - // see https://github.com/googleapis/python-aiplatform/issues/3771 - var aggregatedTool = new Tool - { - FunctionDeclarations = { tools.SelectMany(t => t.FunctionDeclarations) }, - }; - - if (aggregatedTool is { FunctionDeclarations: { Count: > 0 } }) - { - request.Tools.Add(aggregatedTool); - } - - return request; - } -} diff --git a/dotnet/src/AutoGen.Gemini/GoogleGeminiClient.cs b/dotnet/src/AutoGen.Gemini/GoogleGeminiClient.cs deleted file mode 100644 index b5baa50025..0000000000 --- a/dotnet/src/AutoGen.Gemini/GoogleGeminiClient.cs +++ /dev/null @@ -1,89 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// GoogleGeminiClient.cs - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Google.Cloud.AIPlatform.V1; -using Google.Protobuf; - -namespace AutoGen.Gemini; - -public class GoogleGeminiClient : IGeminiClient -{ - private readonly string apiKey; - private const string endpoint = "https://generativelanguage.googleapis.com/v1beta"; - private readonly HttpClient httpClient = new(); - private const string generateContentPath = "models/{0}:generateContent"; - private const string generateContentStreamPath = "models/{0}:streamGenerateContent"; - - public GoogleGeminiClient(HttpClient httpClient, string apiKey) - { - this.apiKey = apiKey; - this.httpClient = httpClient; - } - - public GoogleGeminiClient(string apiKey) - { - this.apiKey = apiKey; - } - - public async Task GenerateContentAsync(GenerateContentRequest request, CancellationToken cancellationToken = default) - { - var path = string.Format(generateContentPath, request.Model); - var url = $"{endpoint}/{path}?key={apiKey}"; - - var httpContent = new StringContent(JsonFormatter.Default.Format(request), System.Text.Encoding.UTF8, "application/json"); - var response = await httpClient.PostAsync(url, httpContent, cancellationToken); - - if (!response.IsSuccessStatusCode) - { - throw new Exception($"Failed to generate content. Status code: {response.StatusCode}"); - } - - var json = await response.Content.ReadAsStringAsync(); - return GenerateContentResponse.Parser.ParseJson(json); - } - - public async IAsyncEnumerable GenerateContentStreamAsync(GenerateContentRequest request) - { - var path = string.Format(generateContentStreamPath, request.Model); - var url = $"{endpoint}/{path}?key={apiKey}&alt=sse"; - - var httpContent = new StringContent(JsonFormatter.Default.Format(request), System.Text.Encoding.UTF8, "application/json"); - var requestMessage = new HttpRequestMessage(HttpMethod.Post, url) - { - Content = httpContent - }; - - var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead); - - if (!response.IsSuccessStatusCode) - { - throw new Exception($"Failed to generate content. Status code: {response.StatusCode}"); - } - - var stream = await response.Content.ReadAsStreamAsync(); - var jp = new JsonParser(JsonParser.Settings.Default.WithIgnoreUnknownFields(true)); - using var streamReader = new System.IO.StreamReader(stream); - while (!streamReader.EndOfStream) - { - var json = await streamReader.ReadLineAsync(); - if (string.IsNullOrWhiteSpace(json)) - { - continue; - } - - json = json.Substring("data:".Length).Trim(); - yield return jp.Parse(json); - } - } -} diff --git a/dotnet/src/AutoGen.Gemini/IGeminiClient.cs b/dotnet/src/AutoGen.Gemini/IGeminiClient.cs deleted file mode 100644 index 3e6683772a..0000000000 --- a/dotnet/src/AutoGen.Gemini/IGeminiClient.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// IGeminiClient.cs - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Google.Cloud.AIPlatform.V1; - -namespace AutoGen.Gemini; - -public interface IGeminiClient -{ - Task GenerateContentAsync(GenerateContentRequest request, CancellationToken cancellationToken = default); - IAsyncEnumerable GenerateContentStreamAsync(GenerateContentRequest request); -} diff --git a/dotnet/src/AutoGen.Gemini/Middleware/GeminiAgentExtension.cs b/dotnet/src/AutoGen.Gemini/Middleware/GeminiAgentExtension.cs deleted file mode 100644 index 8a43a650fc..0000000000 --- a/dotnet/src/AutoGen.Gemini/Middleware/GeminiAgentExtension.cs +++ /dev/null @@ -1,46 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GeminiAgentExtension.cs - -using AutoGen.Core; - -namespace AutoGen.Gemini; - -public static class GeminiAgentExtension -{ - - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this GeminiChatAgent agent, GeminiMessageConnector? connector = null) - { - if (connector == null) - { - connector = new GeminiMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, GeminiMessageConnector? connector = null) - { - if (connector == null) - { - connector = new GeminiMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.Gemini/Middleware/GeminiMessageConnector.cs b/dotnet/src/AutoGen.Gemini/Middleware/GeminiMessageConnector.cs deleted file mode 100644 index b54cb25da8..0000000000 --- a/dotnet/src/AutoGen.Gemini/Middleware/GeminiMessageConnector.cs +++ /dev/null @@ -1,489 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GeminiMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; -using Google.Cloud.AIPlatform.V1; -using Google.Protobuf; -using Google.Protobuf.WellKnownTypes; -using static Google.Cloud.AIPlatform.V1.Candidate.Types; -using IMessage = AutoGen.Core.IMessage; - -namespace AutoGen.Gemini; - -public class GeminiMessageConnector : IStreamingMiddleware -{ - /// - /// if true, the connector will throw an exception if it encounters an unsupport message type. - /// Otherwise, it will ignore processing the message and return the message as is. 
- /// - private readonly bool strictMode; - - /// - /// Initializes a new instance of the class. - /// - /// whether to throw an exception if it encounters an unsupport message type. - /// If true, the connector will throw an exception if it encounters an unsupport message type. - /// If false, it will ignore processing the message and return the message as is. - public GeminiMessageConnector(bool strictMode = false) - { - this.strictMode = strictMode; - } - - public string Name => nameof(GeminiMessageConnector); - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var messages = ProcessMessage(context.Messages, agent); - - var bucket = new List(); - - await foreach (var reply in agent.GenerateStreamingReplyAsync(messages, context.Options, cancellationToken)) - { - if (reply is Core.IMessage m) - { - // if m.Content is empty and stop reason is Stop, ignore the message - if (m.Content.Candidates.Count == 1 && m.Content.Candidates[0].Content.Parts.Count == 1 && m.Content.Candidates[0].Content.Parts[0].DataCase == Part.DataOneofCase.Text) - { - var text = m.Content.Candidates[0].Content.Parts[0].Text; - var stopReason = m.Content.Candidates[0].FinishReason; - if (string.IsNullOrEmpty(text) && stopReason == FinishReason.Stop) - { - continue; - } - } - - bucket.Add(m.Content); - - yield return PostProcessStreamingMessage(m.Content, agent); - } - else if (strictMode) - { - throw new InvalidOperationException($"Unsupported message type: {reply.GetType()}"); - } - else - { - yield return reply; - } - - // aggregate the message updates from bucket into a single message - if (bucket is { Count: > 0 }) - { - var isTextMessageUpdates = bucket.All(m => m.Candidates.Count == 1 && m.Candidates[0].Content.Parts.Count == 1 && m.Candidates[0].Content.Parts[0].DataCase == Part.DataOneofCase.Text); - var isFunctionCallUpdates = bucket.Any(m => m.Candidates.Count == 1 && m.Candidates[0].Content.Parts.Count == 1 && m.Candidates[0].Content.Parts[0].DataCase == Part.DataOneofCase.FunctionCall); - if (isTextMessageUpdates) - { - var text = string.Join(string.Empty, bucket.Select(m => m.Candidates[0].Content.Parts[0].Text)); - var textMessage = new TextMessage(Role.Assistant, text, agent.Name); - - yield return textMessage; - } - else if (isFunctionCallUpdates) - { - var functionCallParts = bucket.Where(m => m.Candidates.Count == 1 && m.Candidates[0].Content.Parts.Count == 1 && m.Candidates[0].Content.Parts[0].DataCase == Part.DataOneofCase.FunctionCall) - .Select(m => m.Candidates[0].Content.Parts[0]).ToList(); - - var toolCalls = new List(); - foreach (var part in functionCallParts) - { - var fc = part.FunctionCall; - var toolCall = new ToolCall(fc.Name, fc.Args.ToString()); - - toolCalls.Add(toolCall); - } - - var toolCallMessage = new ToolCallMessage(toolCalls, agent.Name); - - yield return toolCallMessage; - } - else - { - throw new InvalidOperationException("The response should contain either text or tool calls."); - } - } - } - } - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var messages = ProcessMessage(context.Messages, agent); - var reply = await agent.GenerateReplyAsync(messages, context.Options, cancellationToken); - - return reply switch - { - Core.IMessage m => PostProcessMessage(m.Content, agent), - _ when strictMode => throw new InvalidOperationException($"Unsupported message type: 
{reply.GetType()}"), - _ => reply, - }; - } - - private IMessage PostProcessStreamingMessage(GenerateContentResponse m, IAgent agent) - { - this.ValidateGenerateContentResponse(m); - - var candidate = m.Candidates[0]; - var parts = candidate.Content.Parts; - - if (parts.Count == 1 && parts[0].DataCase == Part.DataOneofCase.Text) - { - var content = parts[0].Text; - return new TextMessageUpdate(Role.Assistant, content, agent.Name); - } - else - { - var toolCalls = new List(); - foreach (var part in parts) - { - if (part.DataCase == Part.DataOneofCase.FunctionCall) - { - var fc = part.FunctionCall; - var toolCall = new ToolCall(fc.Name, fc.Args.ToString()); - - toolCalls.Add(toolCall); - } - } - - if (toolCalls.Count > 0) - { - var toolCallMessage = new ToolCallMessage(toolCalls, agent.Name); - return toolCallMessage; - } - else - { - throw new InvalidOperationException("The response should contain either text or tool calls."); - } - } - } - - private IMessage PostProcessMessage(GenerateContentResponse m, IAgent agent) - { - this.ValidateGenerateContentResponse(m); - var candidate = m.Candidates[0]; - var parts = candidate.Content.Parts; - - if (parts.Count == 1 && parts[0].DataCase == Part.DataOneofCase.Text) - { - var content = parts[0].Text; - return new TextMessage(Role.Assistant, content, agent.Name); - } - else - { - var toolCalls = new List(); - foreach (var part in parts) - { - if (part.DataCase == Part.DataOneofCase.FunctionCall) - { - var fc = part.FunctionCall; - var toolCall = new ToolCall(fc.Name, fc.Args.ToString()); - - toolCalls.Add(toolCall); - } - } - - if (toolCalls.Count > 0) - { - var toolCallMessage = new ToolCallMessage(toolCalls, agent.Name); - return toolCallMessage; - } - else - { - throw new InvalidOperationException("The response should contain either text or tool calls."); - } - } - } - - private IEnumerable ProcessMessage(IEnumerable messages, IAgent agent) - { - return messages.SelectMany(m => - { - if (m is Core.IMessage messageEnvelope) - { - return [m]; - } - else - { - return m switch - { - TextMessage textMessage => ProcessTextMessage(textMessage, agent), - ImageMessage imageMessage => ProcessImageMessage(imageMessage, agent), - MultiModalMessage multiModalMessage => ProcessMultiModalMessage(multiModalMessage, agent), - ToolCallMessage toolCallMessage => ProcessToolCallMessage(toolCallMessage, agent), - ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage, agent), - ToolCallAggregateMessage toolCallAggregateMessage => ProcessToolCallAggregateMessage(toolCallAggregateMessage, agent), - _ when strictMode => throw new InvalidOperationException($"Unsupported message type: {m.GetType()}"), - _ => [m], - }; - } - }); - } - - private IEnumerable ProcessToolCallAggregateMessage(ToolCallAggregateMessage toolCallAggregateMessage, IAgent agent) - { - var parseAsUser = ShouldParseAsUser(toolCallAggregateMessage, agent); - if (parseAsUser) - { - var content = toolCallAggregateMessage.GetContent(); - - if (content is string str) - { - var textMessage = new TextMessage(Role.User, str, toolCallAggregateMessage.From); - - return ProcessTextMessage(textMessage, agent); - } - - return []; - } - else - { - var toolCallContents = ProcessToolCallMessage(toolCallAggregateMessage.Message1, agent); - var toolCallResultContents = ProcessToolCallResultMessage(toolCallAggregateMessage.Message2, agent); - - return toolCallContents.Concat(toolCallResultContents); - } - } - - private void 
ValidateGenerateContentResponse(GenerateContentResponse response) - { - if (response.Candidates.Count != 1) - { - throw new InvalidOperationException("The response should contain exactly one candidate."); - } - - var candidate = response.Candidates[0]; - if (candidate.Content is null) - { - var finishReason = candidate.FinishReason; - var finishMessage = candidate.FinishMessage; - - throw new InvalidOperationException($"The response should contain content but the content is empty. FinishReason: {finishReason}, FinishMessage: {finishMessage}"); - } - } - - private IEnumerable ProcessToolCallResultMessage(ToolCallResultMessage toolCallResultMessage, IAgent agent) - { - var functionCallResultParts = new List(); - foreach (var toolCallResult in toolCallResultMessage.ToolCalls) - { - if (toolCallResult.Result is null) - { - continue; - } - - // if result is already a json object, use it as is - var json = toolCallResult.Result; - try - { - JsonNode.Parse(json); - } - catch (JsonException) - { - // if the result is not a json object, wrap it in a json object - var result = new { result = json }; - json = JsonSerializer.Serialize(result); - } - var part = new Part - { - FunctionResponse = new FunctionResponse - { - Name = toolCallResult.FunctionName, - Response = Struct.Parser.ParseJson(json), - } - }; - - functionCallResultParts.Add(part); - } - - var content = new Content - { - Parts = { functionCallResultParts }, - Role = "function", - }; - - return [MessageEnvelope.Create(content, toolCallResultMessage.From)]; - } - - private IEnumerable ProcessToolCallMessage(ToolCallMessage toolCallMessage, IAgent agent) - { - var shouldParseAsUser = ShouldParseAsUser(toolCallMessage, agent); - if (strictMode && shouldParseAsUser) - { - throw new InvalidOperationException("ToolCallMessage is not supported as user role in Gemini."); - } - - var functionCallParts = new List(); - foreach (var toolCall in toolCallMessage.ToolCalls) - { - var part = new Part - { - FunctionCall = new FunctionCall - { - Name = toolCall.FunctionName, - Args = Struct.Parser.ParseJson(toolCall.FunctionArguments), - } - }; - - functionCallParts.Add(part); - } - var content = new Content - { - Parts = { functionCallParts }, - Role = "model" - }; - - return [MessageEnvelope.Create(content, toolCallMessage.From)]; - } - - private IEnumerable ProcessMultiModalMessage(MultiModalMessage multiModalMessage, IAgent agent) - { - var parts = new List(); - foreach (var message in multiModalMessage.Content) - { - if (message is TextMessage textMessage) - { - parts.Add(new Part { Text = textMessage.Content }); - } - else if (message is ImageMessage imageMessage) - { - parts.Add(CreateImagePart(imageMessage)); - } - else - { - throw new InvalidOperationException($"Unsupported message type: {message.GetType()}"); - } - } - - var shouldParseAsUser = ShouldParseAsUser(multiModalMessage, agent); - - if (strictMode && !shouldParseAsUser) - { - // image message is not supported as model role in Gemini - throw new InvalidOperationException("Image message is not supported as model role in Gemini."); - } - - var content = new Content - { - Parts = { parts }, - Role = shouldParseAsUser ? 
"user" : "model", - }; - - return [MessageEnvelope.Create(content, multiModalMessage.From)]; - } - - private IEnumerable ProcessTextMessage(TextMessage textMessage, IAgent agent) - { - if (textMessage.Role == Role.System) - { - // there are only user | model role in Gemini - // if the role is system and the strict mode is enabled, throw an exception - if (strictMode) - { - throw new InvalidOperationException("System role is not supported in Gemini."); - } - - // if strict mode is not enabled, parse the message as a user message - var content = new Content - { - Parts = { new[] { new Part { Text = textMessage.Content } } }, - Role = "user", - }; - - return [MessageEnvelope.Create(content, textMessage.From)]; - } - - var shouldParseAsUser = ShouldParseAsUser(textMessage, agent); - - if (shouldParseAsUser) - { - var content = new Content - { - Parts = { new[] { new Part { Text = textMessage.Content } } }, - Role = "user", - }; - - return [MessageEnvelope.Create(content, textMessage.From)]; - } - else - { - var content = new Content - { - Parts = { new[] { new Part { Text = textMessage.Content } } }, - Role = "model", - }; - - return [MessageEnvelope.Create(content, textMessage.From)]; - } - } - - private IEnumerable ProcessImageMessage(ImageMessage imageMessage, IAgent agent) - { - var imagePart = CreateImagePart(imageMessage); - var shouldParseAsUser = ShouldParseAsUser(imageMessage, agent); - - if (strictMode && !shouldParseAsUser) - { - // image message is not supported as model role in Gemini - throw new InvalidOperationException("Image message is not supported as model role in Gemini."); - } - - var content = new Content - { - Parts = { imagePart }, - Role = shouldParseAsUser ? "user" : "model", - }; - - return [MessageEnvelope.Create(content, imageMessage.From)]; - } - - private Part CreateImagePart(ImageMessage message) - { - if (message.Url is string url) - { - return new Part - { - FileData = new FileData - { - FileUri = url, - MimeType = message.MimeType - } - }; - } - else if (message.Data is BinaryData data) - { - return new Part - { - InlineData = new Blob - { - MimeType = message.MimeType, - Data = ByteString.CopyFrom(data.ToArray()), - } - }; - } - else - { - throw new InvalidOperationException("Invalid ImageMessage, the data or url must be provided"); - } - } - - private bool ShouldParseAsUser(IMessage message, IAgent agent) - { - return message switch - { - TextMessage textMessage => (textMessage.Role == Role.User && textMessage.From is null) - || (textMessage.From != agent.Name), - _ => message.From != agent.Name, - }; - } -} diff --git a/dotnet/src/AutoGen.Gemini/VertexGeminiClient.cs b/dotnet/src/AutoGen.Gemini/VertexGeminiClient.cs deleted file mode 100644 index c4d678517f..0000000000 --- a/dotnet/src/AutoGen.Gemini/VertexGeminiClient.cs +++ /dev/null @@ -1,44 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// VertexGeminiClient.cs - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Google.Cloud.AIPlatform.V1; - -namespace AutoGen.Gemini; - -internal class VertexGeminiClient : IGeminiClient -{ - private readonly PredictionServiceClient client; - public VertexGeminiClient(PredictionServiceClient client) - { - this.client = client; - } - - public VertexGeminiClient(string location) - { - PredictionServiceClientBuilder builder = new() - { - Endpoint = $"{location}-aiplatform.googleapis.com", - }; - - this.client = builder.Build(); - } - - public Task GenerateContentAsync(GenerateContentRequest request, CancellationToken cancellationToken = default) - { - return client.GenerateContentAsync(request, cancellationToken); - } - - public IAsyncEnumerable GenerateContentStreamAsync(GenerateContentRequest request) - { - return client.StreamGenerateContent(request).GetResponseStream(); - } -} diff --git a/dotnet/src/AutoGen.LMStudio/AutoGen.LMStudio.csproj b/dotnet/src/AutoGen.LMStudio/AutoGen.LMStudio.csproj deleted file mode 100644 index aa891e7129..0000000000 --- a/dotnet/src/AutoGen.LMStudio/AutoGen.LMStudio.csproj +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - AutoGen.LMStudio - - - - - - - AutoGen.LMStudio - - Provide support for consuming LMStudio openai-like API service in AutoGen - - - - - - - - - diff --git a/dotnet/src/AutoGen.LMStudio/GlobalUsing.cs b/dotnet/src/AutoGen.LMStudio/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/src/AutoGen.LMStudio/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs b/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs deleted file mode 100644 index cc543743ab..0000000000 --- a/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs +++ /dev/null @@ -1,94 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// LMStudioAgent.cs - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; - -namespace AutoGen.LMStudio; - -/// -/// agent that consumes local server from LM Studio -/// -/// -/// [!code-csharp[LMStudioAgent](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_example_1)] -/// -public class LMStudioAgent : IAgent -{ - private readonly GPTAgent innerAgent; - - public LMStudioAgent( - string name, - LMStudioConfig config, - string systemMessage = "You are a helpful AI assistant", - float temperature = 0.7f, - int maxTokens = 1024, - IEnumerable? functions = null, - IDictionary>>? 
functionMap = null) - { - var client = ConfigOpenAIClientForLMStudio(config); - innerAgent = new GPTAgent( - name: name, - systemMessage: systemMessage, - openAIClient: client, - modelName: "llm", // model name doesn't matter for LM Studio - temperature: temperature, - maxTokens: maxTokens, - functions: functions, - functionMap: functionMap); - } - - public string Name => innerAgent.Name; - - public Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - System.Threading.CancellationToken cancellationToken = default) - { - return innerAgent.GenerateReplyAsync(messages, options, cancellationToken); - } - - private OpenAIClient ConfigOpenAIClientForLMStudio(LMStudioConfig config) - { - // create uri from host and port - var uri = config.Uri; - var handler = new CustomHttpClientHandler(uri); - var httpClient = new HttpClient(handler); - var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2022_12_01) - { - Transport = new HttpClientTransport(httpClient), - }; - - return new OpenAIClient("api-key", option); - } - - private sealed class CustomHttpClientHandler : HttpClientHandler - { - private Uri _modelServiceUrl; - - public CustomHttpClientHandler(Uri modelServiceUrl) - { - _modelServiceUrl = modelServiceUrl; - } - - protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - // request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}"); - var uriBuilder = new UriBuilder(_modelServiceUrl); - uriBuilder.Path = request.RequestUri?.PathAndQuery ?? throw new InvalidOperationException("RequestUri is null"); - request.RequestUri = uriBuilder.Uri; - return base.SendAsync(request, cancellationToken); - } - } -} diff --git a/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs b/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs deleted file mode 100644 index 5ba900057b..0000000000 --- a/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs +++ /dev/null @@ -1,36 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// LMStudioConfig.cs - -using System; - -/// -/// Add support for consuming openai-like API from LM Studio -/// -public class LMStudioConfig : ILLMConfig -{ - public LMStudioConfig(string host, int port) - { - this.Host = host; - this.Port = port; - this.Uri = new Uri($"http://{host}:{port}"); - } - - public LMStudioConfig(Uri uri) - { - this.Uri = uri; - this.Host = uri.Host; - this.Port = uri.Port; - } - - public string Host { get; } - - public int Port { get; } - - public Uri Uri { get; } -} diff --git a/dotnet/src/AutoGen.LMStudio/README.md b/dotnet/src/AutoGen.LMStudio/README.md deleted file mode 100644 index 1e5caf4756..0000000000 --- a/dotnet/src/AutoGen.LMStudio/README.md +++ /dev/null @@ -1,31 +0,0 @@ -## AutoGen.LMStudio - -This package provides support for consuming openai-like API from LMStudio local server. 
- -## Installation -To use `AutoGen.LMStudio`, add the following package to your `.csproj` file: - -```xml - - - -``` - -## Usage -```csharp -using AutoGen.LMStudio; -var localServerEndpoint = "localhost"; -var port = 5000; -var lmStudioConfig = new LMStudioConfig(localServerEndpoint, port); -var agent = new LMStudioAgent( - name: "agent", - systemMessage: "You are an agent that help user to do some tasks.", - lmStudioConfig: lmStudioConfig) - .RegisterPrintMessage(); // register a hook to print message nicely to console - -await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); -``` - -## Update history -### Update on 0.0.7 (2024-02-11) -- Add `LMStudioAgent` to support consuming openai-like API from LMStudio local server. diff --git a/dotnet/src/AutoGen.Mistral/Agent/MistralClientAgent.cs b/dotnet/src/AutoGen.Mistral/Agent/MistralClientAgent.cs deleted file mode 100644 index 867cf8a9ce..0000000000 --- a/dotnet/src/AutoGen.Mistral/Agent/MistralClientAgent.cs +++ /dev/null @@ -1,136 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralClientAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; -using AutoGen.Mistral.Extension; - -namespace AutoGen.Mistral; - -/// -/// Mistral client agent. -/// -/// This agent supports the following input message types: -/// -/// where T is -/// -/// -/// This agent returns the following message types: -/// -/// where T is -/// -/// -/// You can register this agent with -/// to support more AutoGen message types. -/// -public class MistralClientAgent : IStreamingAgent -{ - private readonly MistralClient _client; - private readonly string _systemMessage; - private readonly string _model; - private readonly int? _randomSeed; - private readonly bool _jsonOutput = false; - private ToolChoiceEnum? _toolChoice; - - /// - /// Create a new instance of . - /// - /// - /// the name of this agent - /// the mistral model id. - /// system message. - /// the seed to generate output. - /// tool choice strategy. - /// use json output. - public MistralClientAgent( - MistralClient client, - string name, - string model, - string systemMessage = "You are a helpful AI assistant", - int? randomSeed = null, - ToolChoiceEnum? toolChoice = null, - bool jsonOutput = false) - { - _client = client; - Name = name; - _systemMessage = systemMessage; - _model = model; - _randomSeed = randomSeed; - _jsonOutput = jsonOutput; - _toolChoice = toolChoice; - } - - public string Name { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - var request = BuildChatRequest(messages, options); - var response = await _client.CreateChatCompletionsAsync(request); - - return new MessageEnvelope(response, from: this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? 
options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var request = BuildChatRequest(messages, options); - var response = _client.StreamingChatCompletionsAsync(request); - - await foreach (var content in response) - { - yield return new MessageEnvelope(content, from: this.Name); - } - } - - private ChatCompletionRequest BuildChatRequest(IEnumerable messages, GenerateReplyOptions? options) - { - var chatHistory = BuildChatHistory(messages); - var chatRequest = new ChatCompletionRequest(model: _model, messages: chatHistory.ToList(), temperature: options?.Temperature, randomSeed: _randomSeed) - { - Stop = options?.StopSequence, - MaxTokens = options?.MaxToken, - ResponseFormat = _jsonOutput ? new ResponseFormat() { ResponseFormatType = "json_object" } : null, - }; - - if (options?.Functions != null) - { - chatRequest.Tools = options.Functions.Select(f => new FunctionTool(f.ToMistralFunctionDefinition())).ToList(); - chatRequest.ToolChoice = _toolChoice ?? ToolChoiceEnum.Auto; - } - - return chatRequest; - } - - private IEnumerable BuildChatHistory(IEnumerable messages) - { - var history = messages.Select(m => m switch - { - IMessage chatMessage => chatMessage.Content, - _ => throw new ArgumentException("Invalid message type") - }); - - // if there's no system message in the history, add one to the beginning - if (!history.Any(m => m.Role == ChatMessage.RoleEnum.System)) - { - history = new[] { new ChatMessage(ChatMessage.RoleEnum.System, _systemMessage) }.Concat(history); - } - - return history; - } -} diff --git a/dotnet/src/AutoGen.Mistral/AutoGen.Mistral.csproj b/dotnet/src/AutoGen.Mistral/AutoGen.Mistral.csproj deleted file mode 100644 index ee905d1177..0000000000 --- a/dotnet/src/AutoGen.Mistral/AutoGen.Mistral.csproj +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - AutoGen.Mistral - - - - - - - AutoGen.Mistral - - Provide support for consuming Mistral model in AutoGen - - - - - - - - - diff --git a/dotnet/src/AutoGen.Mistral/Converters/JsonPropertyNameEnumConverter.cs b/dotnet/src/AutoGen.Mistral/Converters/JsonPropertyNameEnumConverter.cs deleted file mode 100644 index d795f09247..0000000000 --- a/dotnet/src/AutoGen.Mistral/Converters/JsonPropertyNameEnumConverter.cs +++ /dev/null @@ -1,49 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// JsonPropertyNameEnumConverter.cs - -using System; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -internal class JsonPropertyNameEnumConverter : JsonConverter where T : struct, Enum -{ - public override T Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - string value = reader.GetString() ?? 
throw new JsonException("Value was null."); - - foreach (var field in typeToConvert.GetFields()) - { - var attribute = field.GetCustomAttribute(); - if (attribute?.Name == value) - { - return (T)Enum.Parse(typeToConvert, field.Name); - } - } - - throw new JsonException($"Unable to convert \"{value}\" to enum {typeToConvert}."); - } - - public override void Write(Utf8JsonWriter writer, T value, JsonSerializerOptions options) - { - var field = value.GetType().GetField(value.ToString()); - var attribute = field?.GetCustomAttribute(); - - if (attribute != null) - { - writer.WriteStringValue(attribute.Name); - } - else - { - writer.WriteStringValue(value.ToString()); - } - } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionRequest.cs b/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionRequest.cs deleted file mode 100644 index e02ae1ed9d..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionRequest.cs +++ /dev/null @@ -1,125 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionRequest.cs - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class ChatCompletionRequest -{ - /// - /// Initializes a new instance of the class. - /// - /// ID of the model to use. You can use the [List Available Models](/api#operation/listModels) API to see all of your available models, or see our [Model overview](/models) for model descriptions. (required). - /// The prompt(s) to generate completions for, encoded as a list of dict with role and content. The first prompt role should be `user` or `system`. (required). - /// What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both. (default to 0.7M). - /// Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. (default to 1M). - /// The maximum number of tokens to generate in the completion. The token count of your prompt plus `max_tokens` cannot exceed the model's context length. . - /// Whether to stream back partial progress. If set, tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. (default to false). - /// Whether to inject a safety prompt before all conversations. (default to false). - /// The seed to use for random sampling. If set, different calls will generate deterministic results. . - public ChatCompletionRequest(string? model = default(string), List? messages = default(List), float? temperature = 0.7f, float? topP = 1f, int? maxTokens = default(int?), bool? stream = false, bool safePrompt = false, int? 
randomSeed = default(int?)) - { - // to ensure "model" is required (not null) - if (model == null) - { - throw new ArgumentNullException("model is a required property for ChatCompletionRequest and cannot be null"); - } - this.Model = model; - // to ensure "messages" is required (not null) - if (messages == null) - { - throw new ArgumentNullException("messages is a required property for ChatCompletionRequest and cannot be null"); - } - this.Messages = messages; - // use default value if no "temperature" provided - this.Temperature = temperature ?? 0.7f; - // use default value if no "topP" provided - this.TopP = topP ?? 1f; - this.MaxTokens = maxTokens; - // use default value if no "stream" provided - this.Stream = stream ?? false; - this.SafePrompt = safePrompt; - this.RandomSeed = randomSeed; - } - /// - /// ID of the model to use. You can use the [List Available Models](/api#operation/listModels) API to see all of your available models, or see our [Model overview](/models) for model descriptions. - /// - /// ID of the model to use. You can use the [List Available Models](/api#operation/listModels) API to see all of your available models, or see our [Model overview](/models) for model descriptions. - /// mistral-tiny - [JsonPropertyName("model")] - public string Model { get; set; } - - /// - /// The prompt(s) to generate completions for, encoded as a list of dict with role and content. The first prompt role should be `user` or `system`. - /// - /// The prompt(s) to generate completions for, encoded as a list of dict with role and content. The first prompt role should be `user` or `system`. - /// [{"role":"user","content":"What is the best French cheese?"}] - [JsonPropertyName("messages")] - public List Messages { get; set; } - - /// - /// What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both. - /// - /// What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both. - /// 0.7 - [JsonPropertyName("temperature")] - public float? Temperature { get; set; } - - /// - /// Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. - /// - /// Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. - /// 1 - [JsonPropertyName("top_p")] - public float? TopP { get; set; } - - /// - /// The maximum number of tokens to generate in the completion. The token count of your prompt plus `max_tokens` cannot exceed the model's context length. - /// - /// The maximum number of tokens to generate in the completion. The token count of your prompt plus `max_tokens` cannot exceed the model's context length. - /// 16 - [JsonPropertyName("max_tokens")] - public int? MaxTokens { get; set; } - - /// - /// Whether to stream back partial progress. 
If set, tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. - /// - /// Whether to stream back partial progress. If set, tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. - [JsonPropertyName("stream")] - public bool? Stream { get; set; } - - /// - /// Whether to inject a safety prompt before all conversations. - /// - /// Whether to inject a safety prompt before all conversations. - [JsonPropertyName("safe_prompt")] - public bool SafePrompt { get; set; } - - /// - /// The seed to use for random sampling. If set, different calls will generate deterministic results. - /// - /// The seed to use for random sampling. If set, different calls will generate deterministic results. - [JsonPropertyName("random_seed")] - public int? RandomSeed { get; set; } - - [JsonPropertyName("stop")] - public string[]? Stop { get; set; } - - [JsonPropertyName("tools")] - public List? Tools { get; set; } - - [JsonPropertyName("tool_choice")] - public ToolChoiceEnum? ToolChoice { get; set; } - - [JsonPropertyName("response_format")] - public ResponseFormat? ResponseFormat { get; set; } = null; -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionResponse.cs b/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionResponse.cs deleted file mode 100644 index e31155a288..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/ChatCompletionResponse.cs +++ /dev/null @@ -1,56 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionResponse.cs - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class ChatCompletionResponse -{ - /// - /// Gets or Sets Id - /// - /// cmpl-e5cc70bb28c444948073e77776eb30ef - [JsonPropertyName("id")] - public string? Id { get; set; } - - /// - /// Gets or Sets VarObject - /// - /// chat.completion - [JsonPropertyName("object")] - public string? VarObject { get; set; } - - /// - /// Gets or Sets Created - /// - /// 1702256327 - [JsonPropertyName("created")] - public int Created { get; set; } - - /// - /// Gets or Sets Model - /// - /// mistral-tiny - [JsonPropertyName("model")] - public string? Model { get; set; } - - /// - /// Gets or Sets Choices - /// - [JsonPropertyName("choices")] - public List? Choices { get; set; } - - /// - /// Gets or Sets Usage - /// - [JsonPropertyName("usage")] - public Usage? 
Usage { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/ChatMessage.cs b/dotnet/src/AutoGen.Mistral/DTOs/ChatMessage.cs deleted file mode 100644 index 81106d86b7..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/ChatMessage.cs +++ /dev/null @@ -1,109 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatMessage.cs - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class ChatMessage -{ - /// - /// Initializes a new instance of the class. - /// - /// role. - /// content. - public ChatMessage(RoleEnum? role = default, string? content = null) - { - this.Role = role; - this.Content = content; - } - - [JsonConverter(typeof(JsonPropertyNameEnumConverter))] - public enum RoleEnum - { - /// - /// Enum System for value: system - /// - [JsonPropertyName("system")] - //[EnumMember(Value = "system")] - System = 1, - - /// - /// Enum User for value: user - /// - [JsonPropertyName("user")] - //[EnumMember(Value = "user")] - User = 2, - - /// - /// Enum Assistant for value: assistant - /// - [JsonPropertyName("assistant")] - //[EnumMember(Value = "assistant")] - Assistant = 3, - - [JsonPropertyName("tool")] - Tool = 4, - } - - /// - /// Gets or Sets Role - /// - [JsonPropertyName("role")] - public RoleEnum? Role { get; set; } - - /// - /// Gets or Sets Content - /// - [JsonPropertyName("content")] - public string? Content { get; set; } - - /// - /// Gets or Sets name for tool calls - /// - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("tool_calls")] - public List? ToolCalls { get; set; } - - [JsonPropertyName("tool_call_id")] - public string? ToolCallId { get; set; } -} - -public class FunctionContent -{ - public FunctionContent(string id, FunctionCall function) - { - this.Function = function; - this.Id = id; - } - - [JsonPropertyName("function")] - public FunctionCall Function { get; set; } - - [JsonPropertyName("id")] - public string Id { get; set; } - - public class FunctionCall - { - public FunctionCall(string name, string arguments) - { - this.Name = name; - this.Arguments = arguments; - } - - [JsonPropertyName("name")] - public string Name { get; set; } - - [JsonPropertyName("arguments")] - public string Arguments { get; set; } - } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/Choice.cs b/dotnet/src/AutoGen.Mistral/DTOs/Choice.cs deleted file mode 100644 index 40dd5b3c01..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/Choice.cs +++ /dev/null @@ -1,64 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Choice.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class Choice -{ - [JsonConverter(typeof(JsonPropertyNameEnumConverter))] - public enum FinishReasonEnum - { - /// - /// Enum Stop for value: stop - /// - [JsonPropertyName("stop")] - Stop = 1, - - /// - /// Enum Length for value: length - /// - [JsonPropertyName("length")] - Length = 2, - - /// - /// Enum ModelLength for value: model_length - /// - [JsonPropertyName("model_length")] - ModelLength = 3, - - [JsonPropertyName("error")] - Error = 4, - - [JsonPropertyName("tool_calls")] - ToolCalls = 5, - } - - /// - /// Gets or Sets FinishReason - /// - [JsonPropertyName("finish_reason")] - public FinishReasonEnum? FinishReason { get; set; } - - [JsonPropertyName("index")] - public int Index { get; set; } - - /// - /// Gets or Sets Message - /// - [JsonPropertyName("message")] - public ChatMessage? Message { get; set; } - - /// - /// Gets or Sets Delta - /// - [JsonPropertyName("delta")] - public ChatMessage? Delta { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/Error.cs b/dotnet/src/AutoGen.Mistral/DTOs/Error.cs deleted file mode 100644 index e306036b36..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/Error.cs +++ /dev/null @@ -1,45 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Error.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral -{ - public class Error - { - public Error(string type, string message, string? param = default(string), string? code = default(string)) - { - Type = type; - Message = message; - Param = param; - Code = code; - } - - [JsonPropertyName("type")] - public string Type { get; set; } - - /// - /// Gets or Sets Message - /// - [JsonPropertyName("message")] - public string Message { get; set; } - - /// - /// Gets or Sets Param - /// - [JsonPropertyName("param")] - public string? Param { get; set; } - - /// - /// Gets or Sets Code - /// - [JsonPropertyName("code")] - public string? Code { get; set; } - } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/ErrorResponse.cs b/dotnet/src/AutoGen.Mistral/DTOs/ErrorResponse.cs deleted file mode 100644 index 2e6c96a88d..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/ErrorResponse.cs +++ /dev/null @@ -1,25 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// ErrorResponse.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class ErrorResponse -{ - public ErrorResponse(Error error) - { - Error = error; - } - /// - /// Gets or Sets Error - /// - [JsonPropertyName("error")] - public Error Error { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/FunctionDefinition.cs b/dotnet/src/AutoGen.Mistral/DTOs/FunctionDefinition.cs deleted file mode 100644 index 85da697cab..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/FunctionDefinition.cs +++ /dev/null @@ -1,32 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionDefinition.cs - -using System.Text.Json.Serialization; -using Json.Schema; - -namespace AutoGen.Mistral; - -public class FunctionDefinition -{ - public FunctionDefinition(string name, string description, JsonSchema? parameters = default) - { - Name = name; - Description = description; - Parameters = parameters; - } - - [JsonPropertyName("name")] - public string Name { get; set; } - - [JsonPropertyName("description")] - public string Description { get; set; } - - [JsonPropertyName("parameters")] - public JsonSchema? Parameters { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/Model.cs b/dotnet/src/AutoGen.Mistral/DTOs/Model.cs deleted file mode 100644 index 5cfd4effe9..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/Model.cs +++ /dev/null @@ -1,70 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Model.cs - -using System; -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class Model -{ - /// - /// Initializes a new instance of the class. - /// - /// id (required). - /// varObject (required). - /// created (required). - /// ownedBy (required). - public Model(string? id = default(string), string? varObject = default(string), int created = default(int), string? 
ownedBy = default(string)) - { - // to ensure "id" is required (not null) - if (id == null) - { - throw new ArgumentNullException("id is a required property for Model and cannot be null"); - } - this.Id = id; - // to ensure "varObject" is required (not null) - if (varObject == null) - { - throw new ArgumentNullException("varObject is a required property for Model and cannot be null"); - } - this.VarObject = varObject; - this.Created = created; - // to ensure "ownedBy" is required (not null) - if (ownedBy == null) - { - throw new ArgumentNullException("ownedBy is a required property for Model and cannot be null"); - } - this.OwnedBy = ownedBy; - } - - /// - /// Gets or Sets Id - /// - [JsonPropertyName("id")] - public string Id { get; set; } - - /// - /// Gets or Sets VarObject - /// - [JsonPropertyName("object")] - public string VarObject { get; set; } - - /// - /// Gets or Sets Created - /// - [JsonPropertyName("created")] - public int Created { get; set; } - - /// - /// Gets or Sets OwnedBy - /// - [JsonPropertyName("owned_by")] - public string OwnedBy { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/ResponseFormat.cs b/dotnet/src/AutoGen.Mistral/DTOs/ResponseFormat.cs deleted file mode 100644 index 60d7c9db5d..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/ResponseFormat.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ResponseFormat.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class ResponseFormat -{ - [JsonPropertyName("type")] - public string ResponseFormatType { get; set; } = "json_object"; -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/Tool.cs b/dotnet/src/AutoGen.Mistral/DTOs/Tool.cs deleted file mode 100644 index c2051bcd12..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/Tool.cs +++ /dev/null @@ -1,57 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Tool.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public abstract class ToolBase -{ - [JsonPropertyName("type")] - public string Type { get; set; } - - public ToolBase(string type) - { - Type = type; - } -} - -public class FunctionTool : ToolBase -{ - public FunctionTool(FunctionDefinition function) - : base("function") - { - Function = function; - } - - [JsonPropertyName("function")] - public FunctionDefinition Function { get; set; } -} - -[JsonConverter(typeof(JsonPropertyNameEnumConverter))] -public enum ToolChoiceEnum -{ - /// - /// Auto-detect whether to call a function. - /// - [JsonPropertyName("auto")] - Auto = 0, - - /// - /// Won't call a function. - /// - [JsonPropertyName("none")] - None, - - /// - /// Force to call a function. 
- /// - [JsonPropertyName("any")] - Any, -} diff --git a/dotnet/src/AutoGen.Mistral/DTOs/Usage.cs b/dotnet/src/AutoGen.Mistral/DTOs/Usage.cs deleted file mode 100644 index b521fa8f5e..0000000000 --- a/dotnet/src/AutoGen.Mistral/DTOs/Usage.cs +++ /dev/null @@ -1,32 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Usage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Mistral; - -public class Usage -{ - [JsonPropertyName("prompt_tokens")] - public int PromptTokens { get; set; } - - /// - /// Gets or Sets CompletionTokens - /// - /// 93 - [JsonPropertyName("completion_tokens")] - public int CompletionTokens { get; set; } - - /// - /// Gets or Sets TotalTokens - /// - /// 107 - [JsonPropertyName("total_tokens")] - public int TotalTokens { get; set; } -} diff --git a/dotnet/src/AutoGen.Mistral/Extension/FunctionContractExtension.cs b/dotnet/src/AutoGen.Mistral/Extension/FunctionContractExtension.cs deleted file mode 100644 index ad826bc5b0..0000000000 --- a/dotnet/src/AutoGen.Mistral/Extension/FunctionContractExtension.cs +++ /dev/null @@ -1,65 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionContractExtension.cs - -using System; -using System.Collections.Generic; -using AutoGen.Core; -using Json.Schema; -using Json.Schema.Generation; - -namespace AutoGen.Mistral.Extension; - -public static class FunctionContractExtension -{ - /// - /// Convert a to a that can be used in funciton call. - /// - /// function contract - /// - public static FunctionDefinition ToMistralFunctionDefinition(this FunctionContract functionContract) - { - var functionDefinition = new FunctionDefinition(functionContract.Name ?? throw new Exception("Function name cannot be null"), functionContract.Description ?? throw new Exception("Function description cannot be null")); - var requiredParameterNames = new List(); - var propertiesSchemas = new Dictionary(); - var propertySchemaBuilder = new JsonSchemaBuilder().Type(SchemaValueType.Object); - foreach (var param in functionContract.Parameters ?? []) - { - if (param.Name is null) - { - throw new InvalidOperationException("Parameter name cannot be null"); - } - - var schemaBuilder = new JsonSchemaBuilder().FromType(param.ParameterType ?? 
throw new ArgumentNullException(nameof(param.ParameterType))); - if (param.Description != null) - { - schemaBuilder = schemaBuilder.Description(param.Description); - } - - if (param.IsRequired) - { - requiredParameterNames.Add(param.Name); - } - - var schema = schemaBuilder.Build(); - propertiesSchemas[param.Name] = schema; - - } - propertySchemaBuilder = propertySchemaBuilder.Properties(propertiesSchemas); - propertySchemaBuilder = propertySchemaBuilder.Required(requiredParameterNames); - - var option = new System.Text.Json.JsonSerializerOptions() - { - PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase - }; - - functionDefinition.Parameters = propertySchemaBuilder.Build(); - - return functionDefinition; - } -} diff --git a/dotnet/src/AutoGen.Mistral/Extension/MistralAgentExtension.cs b/dotnet/src/AutoGen.Mistral/Extension/MistralAgentExtension.cs deleted file mode 100644 index 9fba7ba6da..0000000000 --- a/dotnet/src/AutoGen.Mistral/Extension/MistralAgentExtension.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralAgentExtension.cs - -using AutoGen.Core; - -namespace AutoGen.Mistral.Extension; - -public static class MistralAgentExtension -{ - /// - /// Register a to support more AutoGen message types. - /// - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MistralClientAgent agent, MistralChatMessageConnector? connector = null) - { - if (connector == null) - { - connector = new MistralChatMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register a to support more AutoGen message types. - /// - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, MistralChatMessageConnector? connector = null) - { - if (connector == null) - { - connector = new MistralChatMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.Mistral/Middleware/MistralChatMessageConnector.cs b/dotnet/src/AutoGen.Mistral/Middleware/MistralChatMessageConnector.cs deleted file mode 100644 index 58c708d08f..0000000000 --- a/dotnet/src/AutoGen.Mistral/Middleware/MistralChatMessageConnector.cs +++ /dev/null @@ -1,328 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralChatMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; - -namespace AutoGen.Mistral; - -public class MistralChatMessageConnector : IStreamingMiddleware, IMiddleware -{ - public string? 
Name => nameof(MistralChatMessageConnector); - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var messages = context.Messages; - var chatMessages = ProcessMessage(messages, agent); - var chunks = new List(); - await foreach (var reply in agent.GenerateStreamingReplyAsync(chatMessages, context.Options, cancellationToken)) - { - if (reply is IMessage chatMessage) - { - chunks.Add(chatMessage.Content); - var response = ProcessChatCompletionResponse(chatMessage, agent); - if (response is not null) - { - yield return response; - } - } - else - { - yield return reply; - } - } - - // if chunks is not empty, then return the aggregate message as the last message - // this is to meet the requirement of streaming call api - // where the last message should be the same result of non-streaming call api - if (chunks.Count == 0) - { - yield break; - } - - var lastResponse = chunks.Last() ?? throw new ArgumentNullException("chunks.Last()"); - var finalResponse = chunks.First() ?? throw new ArgumentNullException("chunks.First()"); - if (lastResponse.Choices!.First().FinishReason == Choice.FinishReasonEnum.ToolCalls) - { - // process as tool call message - foreach (var response in chunks) - { - if (finalResponse.Choices!.First().Message is null) - { - finalResponse.Choices!.First().Message = response.Choices!.First().Delta; - if (finalResponse.Choices!.First().Message!.ToolCalls is null) - { - finalResponse.Choices!.First().Message!.ToolCalls = new List(); - } - } - - if (response.Choices!.First().Delta!.ToolCalls is not null) - { - finalResponse.Choices!.First().Message!.ToolCalls!.AddRange(response.Choices!.First().Delta!.ToolCalls!); - } - - finalResponse.Choices!.First().FinishReason = response.Choices!.First().FinishReason; - - // the usage information will be included in the last message - if (response.Usage is not null) - { - finalResponse.Usage = response.Usage; - } - } - } - else - { - // process as plain text message - foreach (var response in chunks) - { - if (finalResponse.Choices!.First().Message is null) - { - finalResponse.Choices!.First().Message = response.Choices!.First().Delta; - } - - finalResponse.Choices!.First().Message!.Content += response.Choices!.First().Delta!.Content; - finalResponse.Choices!.First().FinishReason = response.Choices!.First().FinishReason; - // the usage information will be included in the last message - if (response.Usage is not null) - { - finalResponse.Usage = response.Usage; - } - } - } - - yield return PostProcessMessage(finalResponse, agent); - } - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var messages = context.Messages; - var chatMessages = ProcessMessage(messages, agent); - var response = await agent.GenerateReplyAsync(chatMessages, context.Options, cancellationToken); - - if (response is IMessage chatMessage) - { - return PostProcessMessage(chatMessage.Content, agent); - } - else - { - return response; - } - } - - private IEnumerable ProcessMessage(IEnumerable messages, IAgent agent) - { - return messages.SelectMany(m => - { - if (m is IMessage chatMessage) - { - return [MessageEnvelope.Create(chatMessage.Content, from: chatMessage.From)]; - } - else - { - return m switch - { - TextMessage textMessage => ProcessTextMessage(textMessage, agent), - ToolCallMessage toolCallMessage when (toolCallMessage.From is null || toolCallMessage.From == agent.Name) 
=> ProcessToolCallMessage(toolCallMessage, agent), - ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage, agent), - AggregateMessage aggregateMessage => ProcessFunctionCallMiddlewareMessage(aggregateMessage, agent), // message type support for functioncall middleware - _ => [m], - }; - } - }); - } - - private IMessage PostProcessMessage(ChatCompletionResponse response, IAgent from) - { - if (response.Choices is null) - { - throw new ArgumentNullException("response.Choices"); - } - - if (response.Choices?.Count != 1) - { - throw new NotSupportedException("response.Choices.Count != 1"); - } - - var choice = response.Choices[0]; - var finishReason = choice.FinishReason ?? throw new ArgumentNullException("choice.FinishReason"); - - if (finishReason == Choice.FinishReasonEnum.Stop || finishReason == Choice.FinishReasonEnum.Length) - { - return new TextMessage(Role.Assistant, choice.Message?.Content ?? throw new ArgumentNullException("choice.Message.Content"), from: from.Name); - } - else if (finishReason == Choice.FinishReasonEnum.ToolCalls) - { - var functionContents = choice.Message?.ToolCalls ?? throw new ArgumentNullException("choice.Message.ToolCalls"); - var toolCalls = functionContents.Select(f => new ToolCall(f.Function.Name, f.Function.Arguments) { ToolCallId = f.Id }).ToList(); - return new ToolCallMessage(toolCalls, from: from.Name); - } - else - { - throw new NotSupportedException($"FinishReason {finishReason} is not supported"); - } - } - - private IMessage? ProcessChatCompletionResponse(IMessage message, IAgent agent) - { - var response = message.Content; - if (response.VarObject != "chat.completion.chunk") - { - throw new NotSupportedException($"VarObject {response.VarObject} is not supported"); - } - if (response.Choices is null) - { - throw new ArgumentNullException("response.Choices"); - } - - if (response.Choices?.Count != 1) - { - throw new NotSupportedException("response.Choices.Count != 1"); - } - - var choice = response.Choices[0]; - var delta = choice.Delta; - - // process text message if delta.content is not null - if (delta?.Content is string content) - { - return new TextMessageUpdate(role: Role.Assistant, content, from: agent.Name); - } - else if (delta?.ToolCalls is var toolCalls && toolCalls is { Count: 1 }) - { - var toolCall = toolCalls[0]; - var functionContent = toolCall.Function; - - return new ToolCallMessageUpdate(functionContent.Name, functionContent.Arguments, from: agent.Name); - } - else - { - return null; - } - } - - private IEnumerable> ProcessTextMessage(TextMessage textMessage, IAgent agent) - { - IEnumerable messages; - // check if textMessage is system message - if (textMessage.Role == Role.System) - { - messages = [new ChatMessage(ChatMessage.RoleEnum.System, textMessage.Content)]; - } - else if (textMessage.From == agent.Name) - { - // if this message is from agent iteself, then its role should be assistant - messages = [new ChatMessage(ChatMessage.RoleEnum.Assistant, textMessage.Content)]; - } - else if (textMessage.From is null) - { - // if from is null, then process the message based on the role - if (textMessage.Role == Role.User) - { - messages = [new ChatMessage(ChatMessage.RoleEnum.User, textMessage.Content)]; - } - else if (textMessage.Role == Role.Assistant) - { - messages = [new ChatMessage(ChatMessage.RoleEnum.Assistant, textMessage.Content)]; - } - else - { - throw new NotSupportedException($"Role {textMessage.Role} is not supported"); - } - } - else - { - // if from is not null, then the 
message is from user - messages = [new ChatMessage(ChatMessage.RoleEnum.User, textMessage.Content)]; - } - - return messages.Select(m => new MessageEnvelope(m, from: textMessage.From)); - } - - private IEnumerable> ProcessToolCallResultMessage(ToolCallResultMessage toolCallResultMessage, IAgent agent) - { - var from = toolCallResultMessage.From; - var messages = new List(); - foreach (var toolCall in toolCallResultMessage.ToolCalls) - { - if (toolCall.Result is null) - { - continue; - } - - var message = new ChatMessage(ChatMessage.RoleEnum.Tool, content: toolCall.Result) - { - Name = toolCall.FunctionName, - ToolCallId = toolCall.ToolCallId, - }; - - messages.Add(message); - } - - return messages.Select(m => new MessageEnvelope(m, from: toolCallResultMessage.From)); - } - - /// - /// Process the aggregate message from function call middleware. If the message is from another agent, this message will be interpreted as an ordinary plain . - /// If the message is from the same agent or the from field is empty, this message will be expanded to the tool call message and tool call result message. - /// - /// - /// - /// - /// - private IEnumerable> ProcessFunctionCallMiddlewareMessage(AggregateMessage aggregateMessage, IAgent agent) - { - if (aggregateMessage.From is string from && from != agent.Name) - { - // if the message is from another agent, then interpret it as a plain text message - // where the content of the plain text message is the content of the tool call result message - var contents = aggregateMessage.Message2.ToolCalls.Select(t => t.Result); - var messages = contents.Select(c => new ChatMessage(ChatMessage.RoleEnum.Assistant, c)); - - return messages.Select(m => new MessageEnvelope(m, from: from)); - } - - // if the message is from the same agent or the from field is empty, then expand the message to tool call message and tool call result message - var toolCallMessage = aggregateMessage.Message1; - var toolCallResultMessage = aggregateMessage.Message2; - - return this.ProcessToolCallMessage(toolCallMessage, agent).Concat(this.ProcessToolCallResultMessage(toolCallResultMessage, agent)); - } - - private IEnumerable> ProcessToolCallMessage(ToolCallMessage toolCallMessage, IAgent agent) - { - IEnumerable messages; - - // the scenario is not support when tool call message is from another agent - if (toolCallMessage.From is string from && from != agent.Name) - { - throw new NotSupportedException("Tool call message from another agent is not supported"); - } - - // convert tool call message to chat message - var chatMessage = new ChatMessage(ChatMessage.RoleEnum.Assistant); - chatMessage.ToolCalls = new List(); - for (var i = 0; i < toolCallMessage.ToolCalls.Count; i++) - { - var toolCall = toolCallMessage.ToolCalls[i]; - var toolCallId = toolCall.ToolCallId ?? 
$"{toolCall.FunctionName}_{i}"; - var functionCall = new FunctionContent.FunctionCall(toolCall.FunctionName, toolCall.FunctionArguments); - var functionContent = new FunctionContent(toolCallId, functionCall); - chatMessage.ToolCalls.Add(functionContent); - } - - messages = [chatMessage]; - - return messages.Select(m => new MessageEnvelope(m, from: toolCallMessage.From)); - } -} diff --git a/dotnet/src/AutoGen.Mistral/MistralAIModelID.cs b/dotnet/src/AutoGen.Mistral/MistralAIModelID.cs deleted file mode 100644 index 672cb920ab..0000000000 --- a/dotnet/src/AutoGen.Mistral/MistralAIModelID.cs +++ /dev/null @@ -1,20 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralAIModelID.cs - -namespace AutoGen.Mistral; - -public class MistralAIModelID -{ - public const string OPEN_MISTRAL_7B = "open-mistral-7b"; - public const string OPEN_MISTRAL_8X7B = "open-mixtral-8x7b"; - public const string OPEN_MISTRAL_8X22B = "open-mixtral-8x22b"; - public const string MISTRAL_SMALL_LATEST = "mistral-small-latest"; - public const string MISTRAL_MEDIUM_LATEST = "mistral-medium-latest"; - public const string MISTRAL_LARGE_LATEST = "mistral-large-latest"; -} diff --git a/dotnet/src/AutoGen.Mistral/MistralClient.cs b/dotnet/src/AutoGen.Mistral/MistralClient.cs deleted file mode 100644 index 33a3a9ed9b..0000000000 --- a/dotnet/src/AutoGen.Mistral/MistralClient.cs +++ /dev/null @@ -1,174 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralClient.cs - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net.Http; -using System.Security.Authentication; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading.Tasks; - -namespace AutoGen.Mistral; - -public class MistralClient : IDisposable -{ - private readonly HttpClient _httpClient; - private readonly string baseUrl = "https://api.mistral.ai/v1"; - - public MistralClient(string apiKey, string? baseUrl = null) - { - _httpClient = new HttpClient(); - _httpClient.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json")); - _httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {apiKey}"); - this.baseUrl = baseUrl ?? this.baseUrl; - } - - public MistralClient(HttpClient httpClient, string? baseUrl = null) - { - _httpClient = httpClient; - _httpClient.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json")); - this.baseUrl = baseUrl ?? 
this.baseUrl; - } - - public async Task CreateChatCompletionsAsync(ChatCompletionRequest chatCompletionRequest) - { - chatCompletionRequest.Stream = false; - var response = await HttpRequestRaw(HttpMethod.Post, chatCompletionRequest); - response.EnsureSuccessStatusCode(); - - var responseStream = await response.Content.ReadAsStreamAsync(); - return await JsonSerializer.DeserializeAsync(responseStream) ?? throw new Exception("Failed to deserialize response"); - } - - public async IAsyncEnumerable StreamingChatCompletionsAsync(ChatCompletionRequest chatCompletionRequest) - { - chatCompletionRequest.Stream = true; - var response = await HttpRequestRaw(HttpMethod.Post, chatCompletionRequest, streaming: true); - using var stream = await response.Content.ReadAsStreamAsync(); - using StreamReader reader = new StreamReader(stream); - string? line = null; - - SseEvent currentEvent = new SseEvent(); - while ((line = await reader.ReadLineAsync()) != null) - { - if (!string.IsNullOrEmpty(line)) - { - currentEvent.Data = line.Substring("data:".Length).Trim(); - } - else // an empty line indicates the end of an event - { - if (currentEvent.Data == "[DONE]") - { - continue; - } - else if (currentEvent.EventType == null) - { - var res = await JsonSerializer.DeserializeAsync( - new MemoryStream(Encoding.UTF8.GetBytes(currentEvent.Data ?? string.Empty))) ?? throw new Exception("Failed to deserialize response"); - yield return res; - } - else if (currentEvent.EventType != null) - { - var res = await JsonSerializer.DeserializeAsync( - new MemoryStream(Encoding.UTF8.GetBytes(currentEvent.Data ?? string.Empty))); - throw new Exception(res?.Error.Message); - } - - // Reset the current event for the next one - currentEvent = new SseEvent(); - } - } - } - - protected async Task HttpRequestRaw(HttpMethod verb, object postData, bool streaming = false) - { - var url = $"{baseUrl}/chat/completions"; - HttpResponseMessage response; - string resultAsString; - HttpRequestMessage req = new HttpRequestMessage(verb, url); - - if (postData != null) - { - if (postData is HttpContent) - { - req.Content = postData as HttpContent; - } - else - { - string jsonContent = JsonSerializer.Serialize(postData, - new JsonSerializerOptions() { DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull }); - var stringContent = new StringContent(jsonContent, Encoding.UTF8, "application/json"); - req.Content = stringContent; - } - } - - response = await this._httpClient.SendAsync(req, - streaming ? HttpCompletionOption.ResponseHeadersRead : HttpCompletionOption.ResponseContentRead); - - if (response.IsSuccessStatusCode) - { - return response; - } - else - { - try - { - resultAsString = await response.Content.ReadAsStringAsync(); - } - catch (Exception e) - { - resultAsString = - "Additionally, the following error was thrown when attempting to read the response content: " + - e.ToString(); - } - - if (response.StatusCode == System.Net.HttpStatusCode.Unauthorized) - { - throw new AuthenticationException( - "Mistral rejected your authorization, most likely due to an invalid API Key. Full API response follows: " + - resultAsString); - } - else if (response.StatusCode == System.Net.HttpStatusCode.InternalServerError) - { - throw new HttpRequestException( - "Mistral had an internal server error, which can happen occasionally. Please retry your request. 
" + - GetErrorMessage(resultAsString, response, url, url)); - } - else - { - throw new HttpRequestException(GetErrorMessage(resultAsString, response, url, url)); - } - } - } - - private string GetErrorMessage(string resultAsString, HttpResponseMessage response, string name, string description = "") - { - return $"Error at {name} ({description}) with HTTP status code: {response.StatusCode}. Content: {resultAsString ?? ""}"; - } - - public void Dispose() - { - _httpClient.Dispose(); - } - - public class SseEvent - { - public SseEvent(string? eventType = null, string? data = null) - { - EventType = eventType; - Data = data; - } - - public string? EventType { get; set; } - public string? Data { get; set; } - } -} diff --git a/dotnet/src/AutoGen.Ollama/Agent/OllamaAgent.cs b/dotnet/src/AutoGen.Ollama/Agent/OllamaAgent.cs deleted file mode 100644 index e0ea85f6b4..0000000000 --- a/dotnet/src/AutoGen.Ollama/Agent/OllamaAgent.cs +++ /dev/null @@ -1,191 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaAgent.cs - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; - -namespace AutoGen.Ollama; - -/// -/// An agent that can interact with ollama models. -/// -public class OllamaAgent : IStreamingAgent -{ - private readonly HttpClient _httpClient; - private readonly string _modelName; - private readonly string _systemMessage; - private readonly OllamaReplyOptions? _replyOptions; - - public OllamaAgent(HttpClient httpClient, string name, string modelName, - string systemMessage = "You are a helpful AI assistant", - OllamaReplyOptions? replyOptions = null) - { - Name = name; - _httpClient = httpClient; - _modelName = modelName; - _systemMessage = systemMessage; - _replyOptions = replyOptions; - } - - public async Task GenerateReplyAsync( - IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellation = default) - { - ChatRequest request = await BuildChatRequest(messages, options); - request.Stream = false; - var httpRequest = BuildRequest(request); - using (HttpResponseMessage? response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseContentRead, cancellation)) - { - response.EnsureSuccessStatusCode(); - Stream? streamResponse = await response.Content.ReadAsStreamAsync(); - ChatResponse chatResponse = await JsonSerializer.DeserializeAsync(streamResponse, cancellationToken: cancellation) - ?? throw new Exception("Failed to deserialize response"); - var output = new MessageEnvelope(chatResponse, from: Name); - return output; - } - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - ChatRequest request = await BuildChatRequest(messages, options); - request.Stream = true; - HttpRequestMessage message = BuildRequest(request); - using (HttpResponseMessage? 
response = await _httpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, cancellationToken)) - { - response.EnsureSuccessStatusCode(); - using Stream? stream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); - using var reader = new StreamReader(stream); - - while (!reader.EndOfStream && !cancellationToken.IsCancellationRequested) - { - string? line = await reader.ReadLineAsync(); - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - ChatResponseUpdate? update = JsonSerializer.Deserialize(line); - if (update is { Done: false }) - { - yield return new MessageEnvelope(update, from: Name); - } - else - { - var finalUpdate = JsonSerializer.Deserialize(line) ?? throw new Exception("Failed to deserialize response"); - - yield return new MessageEnvelope(finalUpdate, from: Name); - } - } - } - } - - public string Name { get; } - - private async Task BuildChatRequest(IEnumerable messages, GenerateReplyOptions? options) - { - var request = new ChatRequest - { - Model = _modelName, - Messages = await BuildChatHistory(messages) - }; - - if (options is OllamaReplyOptions replyOptions) - { - BuildChatRequestOptions(replyOptions, request); - return request; - } - - if (_replyOptions != null) - { - BuildChatRequestOptions(_replyOptions, request); - return request; - } - return request; - } - private void BuildChatRequestOptions(OllamaReplyOptions replyOptions, ChatRequest request) - { - request.Format = replyOptions.Format == FormatType.Json ? OllamaConsts.JsonFormatType : null; - request.Template = replyOptions.Template; - request.KeepAlive = replyOptions.KeepAlive; - - if (replyOptions.Temperature != null - || replyOptions.MaxToken != null - || replyOptions.StopSequence != null - || replyOptions.Seed != null - || replyOptions.MiroStat != null - || replyOptions.MiroStatEta != null - || replyOptions.MiroStatTau != null - || replyOptions.NumCtx != null - || replyOptions.NumGqa != null - || replyOptions.NumGpu != null - || replyOptions.NumThread != null - || replyOptions.RepeatLastN != null - || replyOptions.RepeatPenalty != null - || replyOptions.TopK != null - || replyOptions.TopP != null - || replyOptions.TfsZ != null) - { - request.Options = new ModelReplyOptions - { - Temperature = replyOptions.Temperature, - NumPredict = replyOptions.MaxToken, - Stop = replyOptions.StopSequence?[0], - Seed = replyOptions.Seed, - MiroStat = replyOptions.MiroStat, - MiroStatEta = replyOptions.MiroStatEta, - MiroStatTau = replyOptions.MiroStatTau, - NumCtx = replyOptions.NumCtx, - NumGqa = replyOptions.NumGqa, - NumGpu = replyOptions.NumGpu, - NumThread = replyOptions.NumThread, - RepeatLastN = replyOptions.RepeatLastN, - RepeatPenalty = replyOptions.RepeatPenalty, - TopK = replyOptions.TopK, - TopP = replyOptions.TopP, - TfsZ = replyOptions.TfsZ - }; - } - } - private async Task> BuildChatHistory(IEnumerable messages) - { - var history = messages.Select(m => m switch - { - IMessage chatMessage => chatMessage.Content, - _ => throw new ArgumentException("Invalid message type") - }); - - // if there's no system message in the history, add one to the beginning - if (!history.Any(m => m.Role == "system")) - { - history = new[] { new Message() { Role = "system", Value = _systemMessage } }.Concat(history); - } - - return history.ToList(); - } - - private static HttpRequestMessage BuildRequest(ChatRequest request) - { - string serialized = JsonSerializer.Serialize(request); - return new HttpRequestMessage(HttpMethod.Post, OllamaConsts.ChatCompletionEndpoint) - { - Content = new 
StringContent(serialized, Encoding.UTF8, OllamaConsts.JsonMediaType) - }; - } -} diff --git a/dotnet/src/AutoGen.Ollama/AutoGen.Ollama.csproj b/dotnet/src/AutoGen.Ollama/AutoGen.Ollama.csproj deleted file mode 100644 index 512fe92f3e..0000000000 --- a/dotnet/src/AutoGen.Ollama/AutoGen.Ollama.csproj +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - AutoGen.Ollama - True - - - - - - - AutoGen.Ollama - - Provide support for Ollama server in AutoGen - - - - - - - - diff --git a/dotnet/src/AutoGen.Ollama/DTOs/ChatRequest.cs b/dotnet/src/AutoGen.Ollama/DTOs/ChatRequest.cs deleted file mode 100644 index 9bd6c9a6f3..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/ChatRequest.cs +++ /dev/null @@ -1,59 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatRequest.cs - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class ChatRequest -{ - /// - /// (required) the model name - /// - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - /// - /// the messages of the chat, this can be used to keep a chat memory - /// - [JsonPropertyName("messages")] - public IList Messages { get; set; } = []; - - /// - /// the format to return a response in. Currently, the only accepted value is json - /// - [JsonPropertyName("format")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Format { get; set; } - - /// - /// additional model parameters listed in the documentation for the Modelfile such as temperature - /// - [JsonPropertyName("options")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ModelReplyOptions? Options { get; set; } - /// - /// the prompt template to use (overrides what is defined in the Modelfile) - /// - [JsonPropertyName("template")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Template { get; set; } - /// - /// if false the response will be returned as a single response object, rather than a stream of objects - /// - [JsonPropertyName("stream")] - public bool Stream { get; set; } - /// - /// controls how long the model will stay loaded into memory following the request (default: 5m) - /// - [JsonPropertyName("keep_alive")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? KeepAlive { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/DTOs/ChatResponse.cs b/dotnet/src/AutoGen.Ollama/DTOs/ChatResponse.cs deleted file mode 100644 index 392bca0ae3..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/ChatResponse.cs +++ /dev/null @@ -1,51 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
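A minimal usage sketch for the OllamaAgent and the ChatRequest/Message DTOs removed above, assuming a local Ollama server at the default port and an illustrative model id; the generic type arguments (elided by the hunk formatting) follow AutoGen.Core conventions, and RegisterMessageConnector is the extension removed further down in this patch:

using System;
using System.Net.Http;
using System.Threading.Tasks;
using AutoGen.Core;
using AutoGen.Ollama;
using AutoGen.Ollama.Extension;

public static class OllamaChatSketch
{
    public static async Task RunAsync()
    {
        // BaseAddress is required because the agent posts to the relative /api/chat endpoint.
        using var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:11434") };

        // The message connector translates AutoGen messages such as TextMessage
        // into the Ollama Message DTO used by ChatRequest.Messages.
        var agent = new OllamaAgent(
                httpClient: httpClient,
                name: "assistant",
                modelName: "llama3",   // illustrative model id
                systemMessage: "You are a helpful AI assistant")
            .RegisterMessageConnector();

        var reply = await agent.SendAsync("Why is the sky blue?");
        Console.WriteLine(reply.GetContent());
    }
}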
-// ChatResponse.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class ChatResponse : ChatResponseUpdate -{ - /// - /// time spent generating the response - /// - [JsonPropertyName("total_duration")] - public long TotalDuration { get; set; } - - /// - /// time spent in nanoseconds loading the model - /// - [JsonPropertyName("load_duration")] - public long LoadDuration { get; set; } - - /// - /// number of tokens in the prompt - /// - [JsonPropertyName("prompt_eval_count")] - public int PromptEvalCount { get; set; } - - /// - /// time spent in nanoseconds evaluating the prompt - /// - [JsonPropertyName("prompt_eval_duration")] - public long PromptEvalDuration { get; set; } - - /// - /// number of tokens the response - /// - [JsonPropertyName("eval_count")] - public int EvalCount { get; set; } - - /// - /// time in nanoseconds spent generating the response - /// - [JsonPropertyName("eval_duration")] - public long EvalDuration { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/DTOs/ChatResponseUpdate.cs b/dotnet/src/AutoGen.Ollama/DTOs/ChatResponseUpdate.cs deleted file mode 100644 index 0f0fa478ea..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/ChatResponseUpdate.cs +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatResponseUpdate.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class ChatResponseUpdate -{ - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - [JsonPropertyName("created_at")] - public string CreatedAt { get; set; } = string.Empty; - - [JsonPropertyName("message")] - public Message? Message { get; set; } - - [JsonPropertyName("done")] - public bool Done { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/DTOs/Message.cs b/dotnet/src/AutoGen.Ollama/DTOs/Message.cs deleted file mode 100644 index b0fa7da1b9..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/Message.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Message.cs - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class Message -{ - public Message() - { - } - - public Message(string role, string value) - { - Role = role; - Value = value; - } - - /// - /// the role of the message, either system, user or assistant - /// - [JsonPropertyName("role")] - public string Role { get; set; } = string.Empty; - /// - /// the content of the message - /// - [JsonPropertyName("content")] - public string Value { get; set; } = string.Empty; - - /// - /// (optional): a list of images to include in the message (for multimodal models such as llava) - /// - [JsonPropertyName("images")] - public IList? 
Images { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/DTOs/ModelReplyOptions.cs b/dotnet/src/AutoGen.Ollama/DTOs/ModelReplyOptions.cs deleted file mode 100644 index 1ddbe98cbc..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/ModelReplyOptions.cs +++ /dev/null @@ -1,135 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ModelReplyOptions.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -//https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values -public class ModelReplyOptions -{ - /// - /// Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) - /// - [JsonPropertyName("mirostat")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? MiroStat { get; set; } - - /// - /// Influences how quickly the algorithm responds to feedback from the generated text. - /// A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) - /// - [JsonPropertyName("mirostat_eta")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public float? MiroStatEta { get; set; } - - /// - /// Controls the balance between coherence and diversity of the output. - /// A lower value will result in more focused and coherent text. (Default: 5.0) - /// - [JsonPropertyName("mirostat_tau")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public float? MiroStatTau { get; set; } - - /// - /// Sets the size of the context window used to generate the next token. (Default: 2048) - /// - [JsonPropertyName("num_ctx")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NumCtx { get; set; } - - /// - /// The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b - /// - [JsonPropertyName("num_gqa")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NumGqa { get; set; } - - /// - /// The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. - /// - [JsonPropertyName("num_gpu")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NumGpu { get; set; } - - /// - /// Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. - /// It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). - /// - [JsonPropertyName("num_thread")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NumThread { get; set; } - - /// - /// Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) - /// - [JsonPropertyName("repeat_last_n")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? RepeatLastN { get; set; } - - /// - /// Sets how strongly to penalize repetitions. - /// A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. 
(Default: 1.1) - /// - [JsonPropertyName("repeat_penalty")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public float? RepeatPenalty { get; set; } - - /// - /// The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) - /// - [JsonPropertyName("temperature")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public float? Temperature { get; set; } - - /// - /// Sets the random number seed to use for generation. - /// Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) - /// - [JsonPropertyName("seed")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? Seed { get; set; } - - /// - /// Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. - /// Multiple stop patterns may be set by specifying multiple separate stop parameters in a modelfile. - /// - [JsonPropertyName("stop")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Stop { get; set; } - - /// - /// Tail free sampling is used to reduce the impact of less probable tokens from the output. - /// A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) - /// - [JsonPropertyName("tfs_z")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public float? TfsZ { get; set; } - - /// - /// Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) - /// - [JsonPropertyName("num_predict")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NumPredict { get; set; } - - /// - /// Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) - /// - [JsonPropertyName("top_k")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? TopK { get; set; } - - /// - /// Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) - /// - [JsonPropertyName("top_p")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? TopP { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/DTOs/OllamaReplyOptions.cs b/dotnet/src/AutoGen.Ollama/DTOs/OllamaReplyOptions.cs deleted file mode 100644 index 0f80a0ebfc..0000000000 --- a/dotnet/src/AutoGen.Ollama/DTOs/OllamaReplyOptions.cs +++ /dev/null @@ -1,117 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaReplyOptions.cs - -using AutoGen.Core; - -namespace AutoGen.Ollama; - -public enum FormatType -{ - None, - Json, -} - -public class OllamaReplyOptions : GenerateReplyOptions -{ - /// - /// the format to return a response in. 
Currently, the only accepted value is json - /// - public FormatType Format { get; set; } = FormatType.None; - - /// - /// the prompt template to use (overrides what is defined in the Modelfile) - /// - public string? Template { get; set; } - - /// - /// The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) - /// - public new float? Temperature { get; set; } - - /// - /// controls how long the model will stay loaded into memory following the request (default: 5m) - /// - public string? KeepAlive { get; set; } - - /// - /// Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) - /// - public int? MiroStat { get; set; } - - /// - /// Influences how quickly the algorithm responds to feedback from the generated text. - /// A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) - /// - public float? MiroStatEta { get; set; } - - /// - /// Controls the balance between coherence and diversity of the output. - /// A lower value will result in more focused and coherent text. (Default: 5.0) - /// - public float? MiroStatTau { get; set; } - - /// - /// Sets the size of the context window used to generate the next token. (Default: 2048) - /// - public int? NumCtx { get; set; } - - /// - /// The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b - /// - public int? NumGqa { get; set; } - - /// - /// The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. - /// - public int? NumGpu { get; set; } - - /// - /// Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. - /// It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). - /// - public int? NumThread { get; set; } - - /// - /// Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) - /// - public int? RepeatLastN { get; set; } - - /// - /// Sets how strongly to penalize repetitions. - /// A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) - /// - public float? RepeatPenalty { get; set; } - - /// - /// Sets the random number seed to use for generation. - /// Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) - /// - public int? Seed { get; set; } - - /// - /// Tail free sampling is used to reduce the impact of less probable tokens from the output. - /// A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) - /// - public float? TfsZ { get; set; } - - /// - /// Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) - /// - public new int? MaxToken { get; set; } - - /// - /// Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) - /// - public int? TopK { get; set; } - - /// - /// Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. 
(Default: 0.9) - /// - public int? TopP { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/Embeddings/ITextEmbeddingService.cs b/dotnet/src/AutoGen.Ollama/Embeddings/ITextEmbeddingService.cs deleted file mode 100644 index b42cbd8530..0000000000 --- a/dotnet/src/AutoGen.Ollama/Embeddings/ITextEmbeddingService.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ITextEmbeddingService.cs - -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Ollama; - -public interface ITextEmbeddingService -{ - public Task GenerateAsync(TextEmbeddingsRequest request, CancellationToken cancellationToken); -} diff --git a/dotnet/src/AutoGen.Ollama/Embeddings/OllamaTextEmbeddingService.cs b/dotnet/src/AutoGen.Ollama/Embeddings/OllamaTextEmbeddingService.cs deleted file mode 100644 index 0fe0877f09..0000000000 --- a/dotnet/src/AutoGen.Ollama/Embeddings/OllamaTextEmbeddingService.cs +++ /dev/null @@ -1,50 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaTextEmbeddingService.cs - -using System; -using System.IO; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen.Ollama; - -public class OllamaTextEmbeddingService : ITextEmbeddingService -{ - private readonly HttpClient _client; - - public OllamaTextEmbeddingService(HttpClient client) - { - _client = client; - } - public async Task GenerateAsync(TextEmbeddingsRequest request, CancellationToken cancellationToken = default) - { - using (HttpResponseMessage? response = await _client - .SendAsync(BuildPostRequest(request), HttpCompletionOption.ResponseContentRead, cancellationToken)) - { - response.EnsureSuccessStatusCode(); - - Stream? streamResponse = await response.Content.ReadAsStreamAsync(); - TextEmbeddingsResponse output = await JsonSerializer - .DeserializeAsync(streamResponse, cancellationToken: cancellationToken) - ?? 
throw new Exception("Failed to deserialize response"); - return output; - } - } - private static HttpRequestMessage BuildPostRequest(TextEmbeddingsRequest request) - { - string serialized = JsonSerializer.Serialize(request); - return new HttpRequestMessage(HttpMethod.Post, OllamaConsts.EmbeddingsEndpoint) - { - Content = new StringContent(serialized, Encoding.UTF8, OllamaConsts.JsonMediaType) - }; - } -} diff --git a/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsRequest.cs b/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsRequest.cs deleted file mode 100644 index 672b5096be..0000000000 --- a/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsRequest.cs +++ /dev/null @@ -1,38 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TextEmbeddingsRequest.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class TextEmbeddingsRequest -{ - /// - /// name of model to generate embeddings from - /// - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - /// - /// text to generate embeddings for - /// - [JsonPropertyName("prompt")] - public string Prompt { get; set; } = string.Empty; - /// - /// additional model parameters listed in the documentation for the Modelfile such as temperature - /// - [JsonPropertyName("options")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ModelReplyOptions? Options { get; set; } - /// - /// controls how long the model will stay loaded into memory following the request (default: 5m) - /// - [JsonPropertyName("keep_alive")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? KeepAlive { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsResponse.cs b/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsResponse.cs deleted file mode 100644 index 13a290b29c..0000000000 --- a/dotnet/src/AutoGen.Ollama/Embeddings/TextEmbeddingsResponse.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TextEmbeddingsResponse.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.Ollama; - -public class TextEmbeddingsResponse -{ - [JsonPropertyName("embedding")] - public double[]? Embedding { get; set; } -} diff --git a/dotnet/src/AutoGen.Ollama/Extension/OllamaAgentExtension.cs b/dotnet/src/AutoGen.Ollama/Extension/OllamaAgentExtension.cs deleted file mode 100644 index afd4ec7abd..0000000000 --- a/dotnet/src/AutoGen.Ollama/Extension/OllamaAgentExtension.cs +++ /dev/null @@ -1,45 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
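A small sketch of calling the OllamaTextEmbeddingService removed above; the request is posted to a relative endpoint, so the HttpClient must carry the Ollama base address. The local port and the embedding model name here are assumptions:

using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using AutoGen.Ollama;

public static class OllamaEmbeddingSketch
{
    public static async Task RunAsync()
    {
        using var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:11434") };
        var service = new OllamaTextEmbeddingService(httpClient);

        var request = new TextEmbeddingsRequest
        {
            Model = "all-minilm",   // illustrative embedding model
            Prompt = "AutoGen is a framework for building LLM agents.",
        };

        // Posts to the embeddings endpoint and deserializes the "embedding" array.
        TextEmbeddingsResponse response = await service.GenerateAsync(request, CancellationToken.None);
        Console.WriteLine($"dimensions: {response.Embedding?.Length}");
    }
}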
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaAgentExtension.cs - -using AutoGen.Core; - -namespace AutoGen.Ollama.Extension; - -public static class OllamaAgentExtension -{ - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this OllamaAgent agent, OllamaMessageConnector? connector = null) - { - if (connector == null) - { - connector = new OllamaMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, OllamaMessageConnector? connector = null) - { - if (connector == null) - { - connector = new OllamaMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.Ollama/Middlewares/OllamaMessageConnector.cs b/dotnet/src/AutoGen.Ollama/Middlewares/OllamaMessageConnector.cs deleted file mode 100644 index acf722e78e..0000000000 --- a/dotnet/src/AutoGen.Ollama/Middlewares/OllamaMessageConnector.cs +++ /dev/null @@ -1,192 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
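The connector registered by the extension above also adapts streaming calls, yielding TextMessageUpdate chunks followed by the aggregated TextMessage. A sketch of consuming that stream, assuming an agent wired up as in the earlier chat sketch; the generic parameter of MiddlewareStreamingAgent is inferred from AutoGen.Core since the hunks elide it:

using System;
using System.Threading.Tasks;
using AutoGen.Core;
using AutoGen.Ollama;

public static class OllamaStreamingSketch
{
    // `agent` is an OllamaAgent wrapped with RegisterMessageConnector(), as in the earlier sketch.
    public static async Task StreamAsync(MiddlewareStreamingAgent<OllamaAgent> agent)
    {
        var question = new TextMessage(Role.User, "Write a haiku about the sea.");

        await foreach (var update in agent.GenerateStreamingReplyAsync(new[] { question }))
        {
            if (update is TextMessageUpdate chunk)
            {
                Console.Write(chunk.Content);   // partial tokens as they arrive
            }
            else if (update is TextMessage)
            {
                Console.WriteLine();            // the connector ends the stream with the aggregated reply
            }
        }
    }
}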
-// OllamaMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Core; - -namespace AutoGen.Ollama; - -public class OllamaMessageConnector : IStreamingMiddleware -{ - public string Name => nameof(OllamaMessageConnector); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, - CancellationToken cancellationToken = default) - { - var messages = ProcessMessage(context.Messages, agent); - IMessage reply = await agent.GenerateReplyAsync(messages, context.Options, cancellationToken); - - return reply switch - { - IMessage messageEnvelope when messageEnvelope.Content.Message?.Value is string content => new TextMessage(Role.Assistant, content, messageEnvelope.From), - IMessage messageEnvelope when messageEnvelope.Content.Message?.Value is null => throw new InvalidOperationException("Message content is null"), - _ => reply - }; - } - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var messages = ProcessMessage(context.Messages, agent); - var chunks = new List(); - await foreach (var update in agent.GenerateStreamingReplyAsync(messages, context.Options, cancellationToken)) - { - if (update is IMessage chatResponseUpdate) - { - var response = chatResponseUpdate.Content switch - { - _ when chatResponseUpdate.Content.Message?.Value is string content => new TextMessageUpdate(Role.Assistant, content, chatResponseUpdate.From), - _ => null, - }; - - if (response != null) - { - chunks.Add(chatResponseUpdate.Content); - yield return response; - } - } - else - { - yield return update; - } - } - - if (chunks.Count == 0) - { - yield break; - } - - // if the chunks are not empty, aggregate them into a single message - var messageContent = string.Join(string.Empty, chunks.Select(c => c.Message?.Value)); - var message = new TextMessage(Role.Assistant, messageContent, agent.Name); - - yield return message; - } - - private IEnumerable ProcessMessage(IEnumerable messages, IAgent agent) - { - return messages.SelectMany(m => - { - if (m is IMessage messageEnvelope) - { - return [m]; - } - else - { - return m switch - { - TextMessage textMessage => ProcessTextMessage(textMessage, agent), - ImageMessage imageMessage => ProcessImageMessage(imageMessage, agent), - MultiModalMessage multiModalMessage => ProcessMultiModalMessage(multiModalMessage, agent), - _ => [m], - }; - } - }); - } - - private IEnumerable ProcessMultiModalMessage(MultiModalMessage multiModalMessage, IAgent agent) - { - var textMessages = multiModalMessage.Content.Where(m => m is TextMessage textMessage && textMessage.GetContent() is not null); - var imageMessages = multiModalMessage.Content.Where(m => m is ImageMessage); - - // aggregate the text messages into one message - // by concatenating the content using newline - var textContent = string.Join("\n", textMessages.Select(m => ((TextMessage)m).Content)); - - // collect all the images - var images = imageMessages.SelectMany(m => ProcessImageMessage((ImageMessage)m, agent) - .SelectMany(m => (m as IMessage)?.Content.Images ?? 
[])); - - var message = new Message() - { - Role = "user", - Value = textContent, - Images = images.ToList(), - }; - - return [MessageEnvelope.Create(message, agent.Name)]; - } - - private IEnumerable ProcessImageMessage(ImageMessage imageMessage, IAgent agent) - { - byte[]? data = imageMessage.Data?.ToArray(); - if (data is null) - { - if (imageMessage.Url is null) - { - throw new InvalidOperationException("Invalid ImageMessage, the data or url must be provided"); - } - - var uri = new Uri(imageMessage.Url); - // download the image from the URL - using var client = new HttpClient(); - var response = client.GetAsync(uri).Result; - if (!response.IsSuccessStatusCode) - { - throw new HttpRequestException($"Failed to download the image from {uri}"); - } - - data = response.Content.ReadAsByteArrayAsync().Result; - } - - var base64Image = Convert.ToBase64String(data); - var message = imageMessage.From switch - { - null when imageMessage.Role == Role.User => new Message { Role = "user", Images = [base64Image] }, - null => throw new InvalidOperationException("Invalid Role, the role must be user"), - _ when imageMessage.From != agent.Name => new Message { Role = "user", Images = [base64Image] }, - _ => throw new InvalidOperationException("The from field must be null or the agent name"), - }; - - return [MessageEnvelope.Create(message, agent.Name)]; - } - - private IEnumerable ProcessTextMessage(TextMessage textMessage, IAgent agent) - { - if (textMessage.Role == Role.System) - { - var message = new Message - { - Role = "system", - Value = textMessage.Content - }; - - return [MessageEnvelope.Create(message, agent.Name)]; - } - else if (textMessage.From == agent.Name) - { - var message = new Message - { - Role = "assistant", - Value = textMessage.Content - }; - - return [MessageEnvelope.Create(message, agent.Name)]; - } - else - { - var message = textMessage.From switch - { - null when textMessage.Role == Role.User => new Message { Role = "user", Value = textMessage.Content }, - null when textMessage.Role == Role.Assistant => new Message { Role = "assistant", Value = textMessage.Content }, - null => throw new InvalidOperationException("Invalid Role"), - _ when textMessage.From != agent.Name => new Message { Role = "user", Value = textMessage.Content }, - _ => throw new InvalidOperationException("The from field must be null or the agent name"), - }; - - return [MessageEnvelope.Create(message, agent.Name)]; - } - } -} diff --git a/dotnet/src/AutoGen.Ollama/OllamaConsts.cs b/dotnet/src/AutoGen.Ollama/OllamaConsts.cs deleted file mode 100644 index 2bd095c8a8..0000000000 --- a/dotnet/src/AutoGen.Ollama/OllamaConsts.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// OllamaConsts.cs - -namespace AutoGen.Ollama; - -public class OllamaConsts -{ - public const string JsonFormatType = "json"; - public const string JsonMediaType = "application/json"; - public const string ChatCompletionEndpoint = "/api/chat"; - public const string EmbeddingsEndpoint = "/api/embeddings"; -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs b/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs deleted file mode 100644 index 6cc5c86af9..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs +++ /dev/null @@ -1,120 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GPTAgent.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.OpenAI.V1; - -/// -/// GPT agent that can be used to connect to OpenAI chat models like GPT-3.5, GPT-4, etc. -/// supports the following message types as input: -/// - -/// - -/// - -/// - -/// - -/// - -/// - where T is -/// - where TMessage1 is and TMessage2 is -/// -/// returns the following message types: -/// - -/// - -/// - where TMessage1 is and TMessage2 is -/// -public class GPTAgent : IStreamingAgent -{ - private readonly OpenAIClient openAIClient; - private readonly IStreamingAgent _innerAgent; - - public GPTAgent( - string name, - string systemMessage, - ILLMConfig config, - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? functions = null, - IDictionary>>? functionMap = null) - { - openAIClient = config switch - { - AzureOpenAIConfig azureConfig => new OpenAIClient(new Uri(azureConfig.Endpoint), new Azure.AzureKeyCredential(azureConfig.ApiKey)), - OpenAIConfig openAIConfig => new OpenAIClient(openAIConfig.ApiKey), - _ => throw new ArgumentException($"Unsupported config type {config.GetType()}"), - }; - - var modelName = config switch - { - AzureOpenAIConfig azureConfig => azureConfig.DeploymentName, - OpenAIConfig openAIConfig => openAIConfig.ModelId, - _ => throw new ArgumentException($"Unsupported config type {config.GetType()}"), - }; - - _innerAgent = new OpenAIChatAgent(openAIClient, name, modelName, systemMessage, temperature, maxTokens, seed, responseFormat, functions) - .RegisterMessageConnector(); - - if (functionMap is not null) - { - var functionMapMiddleware = new FunctionCallMiddleware(functionMap: functionMap); - _innerAgent = _innerAgent.RegisterStreamingMiddleware(functionMapMiddleware); - } - - Name = name; - } - - public GPTAgent( - string name, - string systemMessage, - OpenAIClient openAIClient, - string modelName, - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? functions = null, - IDictionary>>? 
functionMap = null) - { - this.openAIClient = openAIClient; - Name = name; - - _innerAgent = new OpenAIChatAgent(openAIClient, name, modelName, systemMessage, temperature, maxTokens, seed, responseFormat, functions) - .RegisterMessageConnector(); - - if (functionMap is not null) - { - var functionMapMiddleware = new FunctionCallMiddleware(functionMap: functionMap); - _innerAgent = _innerAgent.RegisterStreamingMiddleware(functionMapMiddleware); - } - } - - public string Name { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - return await _innerAgent.GenerateReplyAsync(messages, options, cancellationToken); - } - - public IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - return _innerAgent.GenerateStreamingReplyAsync(messages, options, cancellationToken); - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/Agent/OpenAIChatAgent.cs b/dotnet/src/AutoGen.OpenAI.V1/Agent/OpenAIChatAgent.cs deleted file mode 100644 index 57d5026db6..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Agent/OpenAIChatAgent.cs +++ /dev/null @@ -1,212 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.OpenAI.V1; - -/// -/// OpenAI client agent. This agent is a thin wrapper around to provide a simple interface for chat completions. -/// To better work with other agents, it's recommended to use which supports more message types and have a better compatibility with other agents. -/// supports the following message types: -/// -/// -/// where T is : chat request message. -/// -/// -/// returns the following message types: -/// -/// -/// where T is : chat response message. -/// where T is : streaming chat completions update. -/// -/// -/// -public class OpenAIChatAgent : IStreamingAgent -{ - private readonly OpenAIClient openAIClient; - private readonly ChatCompletionsOptions options; - private readonly string systemMessage; - - /// - /// Create a new instance of . - /// - /// openai client - /// agent name - /// model name. e.g. gpt-turbo-3.5 - /// system message - /// temperature - /// max tokens to generated - /// response format, set it to to enable json mode. - /// seed to use, set it to enable deterministic output - /// functions - public OpenAIChatAgent( - OpenAIClient openAIClient, - string name, - string modelName, - string systemMessage = "You are a helpful AI assistant", - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? 
functions = null) - : this( - openAIClient: openAIClient, - name: name, - options: CreateChatCompletionOptions(modelName, temperature, maxTokens, seed, responseFormat, functions), - systemMessage: systemMessage) - { - } - - /// - /// Create a new instance of . - /// - /// openai client - /// agent name - /// system message - /// chat completion option. The option can't contain messages - public OpenAIChatAgent( - OpenAIClient openAIClient, - string name, - ChatCompletionsOptions options, - string systemMessage = "You are a helpful AI assistant") - { - if (options.Messages is { Count: > 0 }) - { - throw new ArgumentException("Messages should not be provided in options"); - } - - this.openAIClient = openAIClient; - this.Name = name; - this.options = options; - this.systemMessage = systemMessage; - } - - public string Name { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - var settings = this.CreateChatCompletionsOptions(options, messages); - var reply = await this.openAIClient.GetChatCompletionsAsync(settings, cancellationToken); - - return new MessageEnvelope(reply, from: this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var settings = this.CreateChatCompletionsOptions(options, messages); - var response = await this.openAIClient.GetChatCompletionsStreamingAsync(settings, cancellationToken); - await foreach (var update in response.WithCancellation(cancellationToken)) - { - if (update.ChoiceIndex > 0) - { - throw new InvalidOperationException("Only one choice is supported in streaming response"); - } - - yield return new MessageEnvelope(update, from: this.Name); - } - } - - private ChatCompletionsOptions CreateChatCompletionsOptions(GenerateReplyOptions? options, IEnumerable messages) - { - var oaiMessages = messages.Select(m => m switch - { - IMessage chatRequestMessage => chatRequestMessage.Content, - _ => throw new ArgumentException("Invalid message type") - }); - - // add system message if there's no system message in messages - if (!oaiMessages.Any(m => m is ChatRequestSystemMessage)) - { - oaiMessages = new[] { new ChatRequestSystemMessage(systemMessage) }.Concat(oaiMessages); - } - - // clone the options by serializing and deserializing - var json = JsonSerializer.Serialize(this.options); - var settings = JsonSerializer.Deserialize(json) ?? throw new InvalidOperationException("Failed to clone options"); - - foreach (var m in oaiMessages) - { - settings.Messages.Add(m); - } - - settings.Temperature = options?.Temperature ?? settings.Temperature; - settings.MaxTokens = options?.MaxToken ?? 
settings.MaxTokens; - - foreach (var functions in this.options.Tools) - { - settings.Tools.Add(functions); - } - - foreach (var stopSequence in this.options.StopSequences) - { - settings.StopSequences.Add(stopSequence); - } - - var openAIFunctionDefinitions = options?.Functions?.Select(f => f.ToOpenAIFunctionDefinition()).ToList(); - if (openAIFunctionDefinitions is { Count: > 0 }) - { - foreach (var f in openAIFunctionDefinitions) - { - settings.Tools.Add(new ChatCompletionsFunctionToolDefinition(f)); - } - } - - if (options?.StopSequence is var sequence && sequence is { Length: > 0 }) - { - foreach (var seq in sequence) - { - settings.StopSequences.Add(seq); - } - } - - return settings; - } - - private static ChatCompletionsOptions CreateChatCompletionOptions( - string modelName, - float temperature = 0.7f, - int maxTokens = 1024, - int? seed = null, - ChatCompletionsResponseFormat? responseFormat = null, - IEnumerable? functions = null) - { - var options = new ChatCompletionsOptions(modelName, []) - { - Temperature = temperature, - MaxTokens = maxTokens, - Seed = seed, - ResponseFormat = responseFormat, - }; - - if (functions is not null) - { - foreach (var f in functions) - { - options.Tools.Add(new ChatCompletionsFunctionToolDefinition(f)); - } - } - - return options; - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj b/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj deleted file mode 100644 index e3a2f41c8f..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj +++ /dev/null @@ -1,25 +0,0 @@ -ο»Ώ - - $(PackageTargetFrameworks) - AutoGen.OpenAI - - - - - - - AutoGen.OpenAI - - OpenAI Intergration for AutoGen. - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen.OpenAI.V1/AzureOpenAIConfig.cs b/dotnet/src/AutoGen.OpenAI.V1/AzureOpenAIConfig.cs deleted file mode 100644 index 6c67c44211..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/AzureOpenAIConfig.cs +++ /dev/null @@ -1,29 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AzureOpenAIConfig.cs - -namespace AutoGen.OpenAI.V1; - -public class AzureOpenAIConfig : ILLMConfig -{ - public AzureOpenAIConfig(string endpoint, string deploymentName, string apiKey, string? modelId = null) - { - this.Endpoint = endpoint; - this.DeploymentName = deploymentName; - this.ApiKey = apiKey; - this.ModelId = modelId; - } - - public string Endpoint { get; } - - public string DeploymentName { get; } - - public string ApiKey { get; } - - public string? ModelId { get; } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/Extension/FunctionContractExtension.cs b/dotnet/src/AutoGen.OpenAI.V1/Extension/FunctionContractExtension.cs deleted file mode 100644 index 6eee66c88b..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Extension/FunctionContractExtension.cs +++ /dev/null @@ -1,69 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionContractExtension.cs - -using System; -using System.Collections.Generic; -using Azure.AI.OpenAI; -using Json.Schema; -using Json.Schema.Generation; - -namespace AutoGen.OpenAI.V1.Extension; - -public static class FunctionContractExtension -{ - /// - /// Convert a to a that can be used in gpt funciton call. - /// - /// function contract - /// - public static FunctionDefinition ToOpenAIFunctionDefinition(this FunctionContract functionContract) - { - var functionDefinition = new FunctionDefinition - { - Name = functionContract.Name, - Description = functionContract.Description, - }; - var requiredParameterNames = new List(); - var propertiesSchemas = new Dictionary(); - var propertySchemaBuilder = new JsonSchemaBuilder().Type(SchemaValueType.Object); - foreach (var param in functionContract.Parameters ?? []) - { - if (param.Name is null) - { - throw new InvalidOperationException("Parameter name cannot be null"); - } - - var schemaBuilder = new JsonSchemaBuilder().FromType(param.ParameterType ?? throw new ArgumentNullException(nameof(param.ParameterType))); - if (param.Description != null) - { - schemaBuilder = schemaBuilder.Description(param.Description); - } - - if (param.IsRequired) - { - requiredParameterNames.Add(param.Name); - } - - var schema = schemaBuilder.Build(); - propertiesSchemas[param.Name] = schema; - - } - propertySchemaBuilder = propertySchemaBuilder.Properties(propertiesSchemas); - propertySchemaBuilder = propertySchemaBuilder.Required(requiredParameterNames); - - var option = new System.Text.Json.JsonSerializerOptions() - { - PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase - }; - - functionDefinition.Parameters = BinaryData.FromObjectAsJson(propertySchemaBuilder.Build(), option); - - return functionDefinition; - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/Extension/MessageExtension.cs b/dotnet/src/AutoGen.OpenAI.V1/Extension/MessageExtension.cs deleted file mode 100644 index 2eaa439bb6..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Extension/MessageExtension.cs +++ /dev/null @@ -1,237 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// MessageExtension.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using Azure.AI.OpenAI; - -namespace AutoGen.OpenAI.V1; - -public static class MessageExtension -{ - public static string TEXT_CONTENT_TYPE = "text"; - public static string IMAGE_CONTENT_TYPE = "image"; - - [Obsolete("This method is deprecated, please replace Message with one of the built-in message types.")] - public static ChatRequestUserMessage ToChatRequestUserMessage(this Message message) - { - if (message.Value is ChatRequestUserMessage message1) - { - return message1; - } - else if (message?.Metadata is { Count: > 0 }) - { - var itemList = new List(); - foreach (var item in message.Metadata) - { - if (item.Key == TEXT_CONTENT_TYPE && item.Value is string txt) - { - itemList.Add(new ChatMessageTextContentItem(txt)); - } - else if (item.Key == IMAGE_CONTENT_TYPE && item.Value is string url) - { - itemList.Add(new ChatMessageImageContentItem(new Uri(url))); - } - } - - if (itemList.Count > 0) - { - return new ChatRequestUserMessage(itemList); - } - else - { - throw new ArgumentException("Content is null and metadata is null"); - } - } - else if (!string.IsNullOrEmpty(message?.Content)) - { - return new ChatRequestUserMessage(message!.Content); - } - - throw new ArgumentException("Content is null and metadata is null"); - } - - [Obsolete("This method is deprecated")] - public static IEnumerable ToOpenAIChatRequestMessage(this IAgent agent, IMessage message) - { - if (message is IMessage oaiMessage) - { - // short-circuit - return [oaiMessage.Content]; - } - - if (message.From != agent.Name) - { - if (message is TextMessage textMessage) - { - if (textMessage.Role == Role.System) - { - var msg = new ChatRequestSystemMessage(textMessage.Content); - - return [msg]; - } - else - { - var msg = new ChatRequestUserMessage(textMessage.Content); - return [msg]; - } - } - else if (message is ImageMessage imageMessage) - { - // multi-modal - var msg = new ChatRequestUserMessage(new ChatMessageImageContentItem(new Uri(imageMessage.Url ?? imageMessage.BuildDataUri()))); - - return [msg]; - } - else if (message is ToolCallMessage) - { - throw new ArgumentException($"ToolCallMessage is not supported when message.From is not the same with agent"); - } - else if (message is ToolCallResultMessage toolCallResult) - { - return toolCallResult.ToolCalls.Select(m => - { - var msg = new ChatRequestToolMessage(m.Result, m.FunctionName); - - return msg; - }); - } - else if (message is MultiModalMessage multiModalMessage) - { - var messageContent = multiModalMessage.Content.Select(m => - { - return m switch - { - TextMessage textMessage => new ChatMessageTextContentItem(textMessage.Content), - ImageMessage imageMessage => new ChatMessageImageContentItem(new Uri(imageMessage.Url ?? imageMessage.BuildDataUri())), - _ => throw new ArgumentException($"Unknown message type: {m.GetType()}") - }; - }); - - var msg = new ChatRequestUserMessage(messageContent); - return [msg]; - } - else if (message is AggregateMessage aggregateMessage) - { - // convert as user message - var resultMessage = aggregateMessage.Message2; - return resultMessage.ToolCalls.Select(m => new ChatRequestUserMessage(m.Result)); - } - else if (message is Message msg) - { - if (msg.Role == Role.System) - { - var systemMessage = new ChatRequestSystemMessage(msg.Content ?? 
string.Empty); - return [systemMessage]; - } - else if (msg.FunctionName is null && msg.FunctionArguments is null) - { - var userMessage = msg.ToChatRequestUserMessage(); - return [userMessage]; - } - else if (msg.FunctionName is not null && msg.FunctionArguments is not null && msg.Content is not null) - { - if (msg.Role == Role.Function) - { - return [new ChatRequestFunctionMessage(msg.FunctionName, msg.Content)]; - } - else - { - return [new ChatRequestUserMessage(msg.Content)]; - } - } - else - { - var userMessage = new ChatRequestUserMessage(msg.Content ?? throw new ArgumentException("Content is null")); - return [userMessage]; - } - } - else - { - throw new ArgumentException($"Unknown message type: {message.GetType()}"); - } - } - else - { - if (message is TextMessage textMessage) - { - if (textMessage.Role == Role.System) - { - throw new ArgumentException("System message is not supported when message.From is the same with agent"); - } - - - return [new ChatRequestAssistantMessage(textMessage.Content)]; - } - else if (message is ToolCallMessage toolCallMessage) - { - var assistantMessage = new ChatRequestAssistantMessage(string.Empty); - var toolCalls = toolCallMessage.ToolCalls.Select(tc => new ChatCompletionsFunctionToolCall(tc.FunctionName, tc.FunctionName, tc.FunctionArguments)); - foreach (var tc in toolCalls) - { - assistantMessage.ToolCalls.Add(tc); - } - - return [assistantMessage]; - } - else if (message is AggregateMessage aggregateMessage) - { - var toolCallMessage1 = aggregateMessage.Message1; - var toolCallResultMessage = aggregateMessage.Message2; - - var assistantMessage = new ChatRequestAssistantMessage(string.Empty); - var toolCalls = toolCallMessage1.ToolCalls.Select(tc => new ChatCompletionsFunctionToolCall(tc.FunctionName, tc.FunctionName, tc.FunctionArguments)); - foreach (var tc in toolCalls) - { - assistantMessage.ToolCalls.Add(tc); - } - - var toolCallResults = toolCallResultMessage.ToolCalls.Select(tc => new ChatRequestToolMessage(tc.Result, tc.FunctionName)); - - // return assistantMessage and tool call result messages - var messages = new List { assistantMessage }; - messages.AddRange(toolCallResults); - - return messages; - } - else if (message is Message msg) - { - if (msg.FunctionArguments is not null && msg.FunctionName is not null && msg.Content is not null) - { - var assistantMessage = new ChatRequestAssistantMessage(msg.Content); - assistantMessage.FunctionCall = new FunctionCall(msg.FunctionName, msg.FunctionArguments); - var functionCallMessage = new ChatRequestFunctionMessage(msg.FunctionName, msg.Content); - return [assistantMessage, functionCallMessage]; - } - else - { - if (msg.Role == Role.Function) - { - return [new ChatRequestFunctionMessage(msg.FunctionName!, msg.Content!)]; - } - else - { - var assistantMessage = new ChatRequestAssistantMessage(msg.Content!); - if (msg.FunctionName is not null && msg.FunctionArguments is not null) - { - assistantMessage.FunctionCall = new FunctionCall(msg.FunctionName, msg.FunctionArguments); - } - - return [assistantMessage]; - } - } - } - else - { - throw new ArgumentException($"Unknown message type: {message.GetType()}"); - } - } - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/Extension/OpenAIAgentExtension.cs b/dotnet/src/AutoGen.OpenAI.V1/Extension/OpenAIAgentExtension.cs deleted file mode 100644 index 9eb22412ed..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Extension/OpenAIAgentExtension.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// 
SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIAgentExtension.cs - -namespace AutoGen.OpenAI.V1.Extension; - -public static class OpenAIAgentExtension -{ - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this OpenAIChatAgent agent, OpenAIChatRequestMessageConnector? connector = null) - { - if (connector == null) - { - connector = new OpenAIChatRequestMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, OpenAIChatRequestMessageConnector? connector = null) - { - if (connector == null) - { - connector = new OpenAIChatRequestMessageConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/GlobalUsing.cs b/dotnet/src/AutoGen.OpenAI.V1/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/src/AutoGen.OpenAI.V1/Middleware/OpenAIChatRequestMessageConnector.cs b/dotnet/src/AutoGen.OpenAI.V1/Middleware/OpenAIChatRequestMessageConnector.cs deleted file mode 100644 index 5b5e5d9780..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/Middleware/OpenAIChatRequestMessageConnector.cs +++ /dev/null @@ -1,393 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatRequestMessageConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; - -namespace AutoGen.OpenAI.V1; - -/// -/// This middleware converts the incoming to where T is before sending to agent. And converts the output to after receiving from agent. -/// Supported are -/// - -/// - -/// - -/// - -/// - -/// - where T is -/// - where TMessage1 is and TMessage2 is -/// -public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddleware -{ - private bool strictMode = false; - - /// - /// Create a new instance of . 
- /// - /// If true, will throw an - /// When the message type is not supported. If false, it will ignore the unsupported message type. - public OpenAIChatRequestMessageConnector(bool strictMode = false) - { - this.strictMode = strictMode; - } - - public string? Name => nameof(OpenAIChatRequestMessageConnector); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var chatMessages = ProcessIncomingMessages(agent, context.Messages); - - var reply = await agent.GenerateReplyAsync(chatMessages, context.Options, cancellationToken); - - return PostProcessMessage(reply); - } - - public async IAsyncEnumerable InvokeAsync( - MiddlewareContext context, - IStreamingAgent agent, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var chatMessages = ProcessIncomingMessages(agent, context.Messages); - var streamingReply = agent.GenerateStreamingReplyAsync(chatMessages, context.Options, cancellationToken); - string? currentToolName = null; - await foreach (var reply in streamingReply) - { - if (reply is IMessage update) - { - if (update.Content.FunctionName is string functionName) - { - currentToolName = functionName; - } - else if (update.Content.ToolCallUpdate is StreamingFunctionToolCallUpdate toolCallUpdate && toolCallUpdate.Name is string toolCallName) - { - currentToolName = toolCallName; - } - var postProcessMessage = PostProcessStreamingMessage(update, currentToolName); - if (postProcessMessage != null) - { - yield return postProcessMessage; - } - } - else - { - if (this.strictMode) - { - throw new InvalidOperationException($"Invalid streaming message type {reply.GetType().Name}"); - } - else - { - yield return reply; - } - } - } - } - - public IMessage PostProcessMessage(IMessage message) - { - return message switch - { - IMessage m => PostProcessChatResponseMessage(m.Content, m.From), - IMessage m => PostProcessChatCompletions(m), - _ when strictMode is false => message, - _ => throw new InvalidOperationException($"Invalid return message type {message.GetType().Name}"), - }; - } - - public IMessage? PostProcessStreamingMessage(IMessage update, string? currentToolName) - { - if (update.Content.ContentUpdate is string contentUpdate) - { - // text message - return new TextMessageUpdate(Role.Assistant, contentUpdate, from: update.From); - } - else if (update.Content.FunctionName is string functionName) - { - return new ToolCallMessageUpdate(functionName, string.Empty, from: update.From); - } - else if (update.Content.FunctionArgumentsUpdate is string functionArgumentsUpdate && currentToolName is string) - { - return new ToolCallMessageUpdate(currentToolName, functionArgumentsUpdate, from: update.From); - } - else if (update.Content.ToolCallUpdate is StreamingFunctionToolCallUpdate tooCallUpdate && currentToolName is string) - { - return new ToolCallMessageUpdate(tooCallUpdate.Name ?? currentToolName, tooCallUpdate.ArgumentsUpdate, from: update.From); - } - else - { - return null; - } - } - - private IMessage PostProcessChatCompletions(IMessage message) - { - // throw exception if prompt filter results is not null - if (message.Content.Choices[0].FinishReason == CompletionsFinishReason.ContentFiltered) - { - throw new InvalidOperationException("The content is filtered because its potential risk. 
Please try another input."); - } - - return PostProcessChatResponseMessage(message.Content.Choices[0].Message, message.From); - } - - private IMessage PostProcessChatResponseMessage(ChatResponseMessage chatResponseMessage, string? from) - { - var textContent = chatResponseMessage.Content; - if (chatResponseMessage.FunctionCall is FunctionCall functionCall) - { - return new ToolCallMessage(functionCall.Name, functionCall.Arguments, from) - { - Content = textContent, - }; - } - - if (chatResponseMessage.ToolCalls.Where(tc => tc is ChatCompletionsFunctionToolCall).Any()) - { - var functionToolCalls = chatResponseMessage.ToolCalls - .Where(tc => tc is ChatCompletionsFunctionToolCall) - .Select(tc => (ChatCompletionsFunctionToolCall)tc); - - var toolCalls = functionToolCalls.Select(tc => new ToolCall(tc.Name, tc.Arguments) { ToolCallId = tc.Id }); - - return new ToolCallMessage(toolCalls, from) - { - Content = textContent, - }; - } - - if (textContent is string content && !string.IsNullOrEmpty(content)) - { - return new TextMessage(Role.Assistant, content, from); - } - - throw new InvalidOperationException("Invalid ChatResponseMessage"); - } - - public IEnumerable ProcessIncomingMessages(IAgent agent, IEnumerable messages) - { - return messages.SelectMany(m => - { - if (m is IMessage crm) - { - return [crm]; - } - else - { - var chatRequestMessages = m switch - { - TextMessage textMessage => ProcessTextMessage(agent, textMessage), - ImageMessage imageMessage when (imageMessage.From is null || imageMessage.From != agent.Name) => ProcessImageMessage(agent, imageMessage), - MultiModalMessage multiModalMessage when (multiModalMessage.From is null || multiModalMessage.From != agent.Name) => ProcessMultiModalMessage(agent, multiModalMessage), - ToolCallMessage toolCallMessage when (toolCallMessage.From is null || toolCallMessage.From == agent.Name) => ProcessToolCallMessage(agent, toolCallMessage), - ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage), - AggregateMessage aggregateMessage => ProcessFunctionCallMiddlewareMessage(agent, aggregateMessage), -#pragma warning disable CS0618 // deprecated - Message msg => ProcessMessage(agent, msg), -#pragma warning restore CS0618 // deprecated - _ when strictMode is false => [], - _ => throw new InvalidOperationException($"Invalid message type: {m.GetType().Name}"), - }; - - if (chatRequestMessages.Any()) - { - return chatRequestMessages.Select(cm => MessageEnvelope.Create(cm, m.From)); - } - else - { - return [m]; - } - } - }); - } - - [Obsolete("This method is deprecated, please use ProcessIncomingMessages(IAgent agent, IEnumerable messages) instead.")] - private IEnumerable ProcessIncomingMessagesForSelf(Message message) - { - if (message.Role == Role.System) - { - return new[] { new ChatRequestSystemMessage(message.Content) }; - } - else if (message.Content is string content && content is { Length: > 0 }) - { - if (message.FunctionName is null) - { - return new[] { new ChatRequestAssistantMessage(message.Content) }; - } - else - { - return new[] { new ChatRequestToolMessage(content, message.FunctionName) }; - } - } - else if (message.FunctionName is string functionName) - { - var msg = new ChatRequestAssistantMessage(content: null) - { - FunctionCall = new FunctionCall(functionName, message.FunctionArguments) - }; - - return new[] - { - msg, - }; - } - else - { - throw new InvalidOperationException("Invalid Message as message from self."); - } - } - - [Obsolete("This method is deprecated, please use 
ProcessIncomingMessages(IAgent agent, IEnumerable messages) instead.")] - private IEnumerable ProcessIncomingMessagesForOther(Message message) - { - if (message.Role == Role.System) - { - return [new ChatRequestSystemMessage(message.Content) { Name = message.From }]; - } - else if (message.Content is string content && content is { Length: > 0 }) - { - if (message.FunctionName is not null) - { - return new[] { new ChatRequestToolMessage(content, message.FunctionName) }; - } - - return [new ChatRequestUserMessage(message.Content) { Name = message.From }]; - } - else if (message.FunctionName is string _) - { - return [new ChatRequestUserMessage("// Message type is not supported") { Name = message.From }]; - } - else - { - throw new InvalidOperationException("Invalid Message as message from other."); - } - } - - private IEnumerable ProcessTextMessage(IAgent agent, TextMessage message) - { - if (message.Role == Role.System) - { - return [new ChatRequestSystemMessage(message.Content) { Name = message.From }]; - } - - if (agent.Name == message.From) - { - return [new ChatRequestAssistantMessage(message.Content) { Name = agent.Name }]; - } - else - { - return message.From switch - { - null when message.Role == Role.User => [new ChatRequestUserMessage(message.Content)], - null when message.Role == Role.Assistant => [new ChatRequestAssistantMessage(message.Content)], - null => throw new InvalidOperationException("Invalid Role"), - _ => [new ChatRequestUserMessage(message.Content) { Name = message.From }] - }; - } - } - - private IEnumerable ProcessImageMessage(IAgent agent, ImageMessage message) - { - if (agent.Name == message.From) - { - // image message from assistant is not supported - throw new ArgumentException("ImageMessage is not supported when message.From is the same with agent"); - } - - var imageContentItem = this.CreateChatMessageImageContentItemFromImageMessage(message); - return [new ChatRequestUserMessage([imageContentItem]) { Name = message.From }]; - } - - private IEnumerable ProcessMultiModalMessage(IAgent agent, MultiModalMessage message) - { - if (agent.Name == message.From) - { - // image message from assistant is not supported - throw new ArgumentException("MultiModalMessage is not supported when message.From is the same with agent"); - } - - IEnumerable items = message.Content.Select(ci => ci switch - { - TextMessage text => new ChatMessageTextContentItem(text.Content), - ImageMessage image => this.CreateChatMessageImageContentItemFromImageMessage(image), - _ => throw new NotImplementedException(), - }); - - return [new ChatRequestUserMessage(items) { Name = message.From }]; - } - - private ChatMessageImageContentItem CreateChatMessageImageContentItemFromImageMessage(ImageMessage message) - { - return message.Data is null && message.Url is not null - ? new ChatMessageImageContentItem(new Uri(message.Url)) - : new ChatMessageImageContentItem(message.Data, message.Data?.MediaType); - } - - private IEnumerable ProcessToolCallMessage(IAgent agent, ToolCallMessage message) - { - if (message.From is not null && message.From != agent.Name) - { - throw new ArgumentException("ToolCallMessage is not supported when message.From is not the same with agent"); - } - - var toolCall = message.ToolCalls.Select((tc, i) => new ChatCompletionsFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments)); - var textContent = message.GetContent() ?? 
string.Empty; - var chatRequestMessage = new ChatRequestAssistantMessage(textContent) { Name = message.From }; - foreach (var tc in toolCall) - { - chatRequestMessage.ToolCalls.Add(tc); - } - - return [chatRequestMessage]; - } - - private IEnumerable ProcessToolCallResultMessage(ToolCallResultMessage message) - { - return message.ToolCalls - .Where(tc => tc.Result is not null) - .Select((tc, i) => new ChatRequestToolMessage(tc.Result, tc.ToolCallId ?? $"{tc.FunctionName}_{i}")); - } - - [Obsolete("This method is deprecated, please use ProcessIncomingMessages(IAgent agent, IEnumerable messages) instead.")] - private IEnumerable ProcessMessage(IAgent agent, Message message) - { - if (message.From is not null && message.From != agent.Name) - { - return ProcessIncomingMessagesForOther(message); - } - else - { - return ProcessIncomingMessagesForSelf(message); - } - } - - private IEnumerable ProcessFunctionCallMiddlewareMessage(IAgent agent, AggregateMessage aggregateMessage) - { - if (aggregateMessage.From is not null && aggregateMessage.From != agent.Name) - { - // convert as user message - var resultMessage = aggregateMessage.Message2; - - return resultMessage.ToolCalls.Select(tc => new ChatRequestUserMessage(tc.Result) { Name = aggregateMessage.From }); - } - else - { - var toolCallMessage1 = aggregateMessage.Message1; - var toolCallResultMessage = aggregateMessage.Message2; - - var assistantMessage = this.ProcessToolCallMessage(agent, toolCallMessage1); - var toolCallResults = this.ProcessToolCallResultMessage(toolCallResultMessage); - - return assistantMessage.Concat(toolCallResults); - } - } -} diff --git a/dotnet/src/AutoGen.OpenAI.V1/OpenAIConfig.cs b/dotnet/src/AutoGen.OpenAI.V1/OpenAIConfig.cs deleted file mode 100644 index 167c4e7bd0..0000000000 --- a/dotnet/src/AutoGen.OpenAI.V1/OpenAIConfig.cs +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// OpenAIConfig.cs - -namespace AutoGen.OpenAI.V1; - -public class OpenAIConfig : ILLMConfig -{ - public OpenAIConfig(string apiKey, string modelId) - { - this.ApiKey = apiKey; - this.ModelId = modelId; - } - - public string ApiKey { get; } - - public string ModelId { get; } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj b/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj deleted file mode 100644 index 8769c3ac48..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj +++ /dev/null @@ -1,28 +0,0 @@ -ο»Ώ - - - $(PackageTargetFrameworks) - AutoGen.SemanticKernel - $(NoWarn);SKEXP0110 - - - - - - - AutoGen.SemanticKernel - - This package contains the semantic kernel integration for AutoGen - - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen.SemanticKernel/Extension/KernelExtension.cs b/dotnet/src/AutoGen.SemanticKernel/Extension/KernelExtension.cs deleted file mode 100644 index 9a2db0f404..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/Extension/KernelExtension.cs +++ /dev/null @@ -1,54 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// KernelExtension.cs - -using System.Linq; -using Microsoft.SemanticKernel; - -namespace AutoGen.SemanticKernel.Extension; - -public static class KernelExtension -{ - public static SemanticKernelAgent ToSemanticKernelAgent(this Kernel kernel, string name, string systemMessage = "You are a helpful AI assistant", PromptExecutionSettings? settings = null) - { - return new SemanticKernelAgent(kernel, name, systemMessage, settings); - } - - /// - /// Convert a to a - /// - /// kernel function metadata - public static FunctionContract ToFunctionContract(this KernelFunctionMetadata metadata) - { - return new FunctionContract() - { - Name = metadata.Name, - Description = metadata.Description, - Parameters = metadata.Parameters.Select(p => p.ToFunctionParameterContract()).ToList(), - ReturnType = metadata.ReturnParameter.ParameterType, - ReturnDescription = metadata.ReturnParameter.Description, - ClassName = metadata.PluginName, - }; - } - - /// - /// Convert a to a - /// - /// kernel parameter metadata - public static FunctionParameterContract ToFunctionParameterContract(this KernelParameterMetadata metadata) - { - return new FunctionParameterContract() - { - Name = metadata.Name, - Description = metadata.Description, - DefaultValue = metadata.DefaultValue, - IsRequired = metadata.IsRequired, - ParameterType = metadata.ParameterType, - }; - } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/Extension/SemanticKernelAgentExtension.cs b/dotnet/src/AutoGen.SemanticKernel/Extension/SemanticKernelAgentExtension.cs deleted file mode 100644 index b758b8cef6..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/Extension/SemanticKernelAgentExtension.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SemanticKernelAgentExtension.cs - -namespace AutoGen.SemanticKernel.Extension; - -public static class SemanticKernelAgentExtension -{ - /// - /// Register an to the - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this SemanticKernelAgent agent, SemanticKernelChatMessageContentConnector? connector = null) - { - if (connector == null) - { - connector = new SemanticKernelChatMessageContentConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } - - /// - /// Register an to the where T is - /// - /// the connector to use. If null, a new instance of will be created. - public static MiddlewareStreamingAgent RegisterMessageConnector( - this MiddlewareStreamingAgent agent, SemanticKernelChatMessageContentConnector? connector = null) - { - if (connector == null) - { - connector = new SemanticKernelChatMessageContentConnector(); - } - - return agent.RegisterStreamingMiddleware(connector); - } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/GlobalUsing.cs b/dotnet/src/AutoGen.SemanticKernel/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/src/AutoGen.SemanticKernel/Middleware/KernelPluginMiddleware.cs b/dotnet/src/AutoGen.SemanticKernel/Middleware/KernelPluginMiddleware.cs deleted file mode 100644 index d19e7f0eda..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/Middleware/KernelPluginMiddleware.cs +++ /dev/null @@ -1,83 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// KernelPluginMiddleware.cs - -using System; -using System.Linq; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.SemanticKernel.Extension; -using Microsoft.SemanticKernel; - -namespace AutoGen.SemanticKernel; - -/// -/// A middleware that consumes -/// -public class KernelPluginMiddleware : IMiddleware -{ - private readonly KernelPlugin _kernelPlugin; - private readonly FunctionCallMiddleware _functionCallMiddleware; - public string? 
Name => nameof(KernelPluginMiddleware); - - public KernelPluginMiddleware(Kernel kernel, KernelPlugin kernelPlugin) - { - _kernelPlugin = kernelPlugin; - var functionContracts = kernelPlugin.Select(k => k.Metadata.ToFunctionContract()); - var functionMap = kernelPlugin.ToDictionary(kv => kv.Metadata.Name, kv => InvokeFunctionPartial(kernel, kv)); - _functionCallMiddleware = new FunctionCallMiddleware(functionContracts, functionMap, Name); - } - - public Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - return _functionCallMiddleware.InvokeAsync(context, agent, cancellationToken); - } - - private async Task InvokeFunctionAsync(Kernel kernel, KernelFunction function, string arguments) - { - var kernelArguments = new KernelArguments(); - var parameters = function.Metadata.Parameters; - var jsonObject = JsonSerializer.Deserialize(arguments) ?? new JsonObject(); - foreach (var parameter in parameters) - { - var parameterName = parameter.Name; - if (jsonObject.ContainsKey(parameterName)) - { - var parameterType = parameter.ParameterType ?? throw new ArgumentException($"Missing parameter type for {parameterName}"); - var parameterValue = jsonObject[parameterName]; - var parameterObject = parameterValue.Deserialize(parameterType); - kernelArguments.Add(parameterName, parameterObject); - } - else - { - if (parameter.DefaultValue != null) - { - kernelArguments.Add(parameterName, parameter.DefaultValue); - } - else if (parameter.IsRequired) - { - throw new ArgumentException($"Missing required parameter: {parameterName}"); - } - } - } - var result = await function.InvokeAsync(kernel, kernelArguments); - - return result.ToString(); - } - - private Func> InvokeFunctionPartial(Kernel kernel, KernelFunction function) - { - return async (string args) => - { - var result = await InvokeFunctionAsync(kernel, function, args); - return result.ToString(); - }; - } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/Middleware/SemanticKernelChatMessageContentConnector.cs b/dotnet/src/AutoGen.SemanticKernel/Middleware/SemanticKernelChatMessageContentConnector.cs deleted file mode 100644 index 3819b85ad0..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/Middleware/SemanticKernelChatMessageContentConnector.cs +++ /dev/null @@ -1,262 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SemanticKernelChatMessageContentConnector.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace AutoGen.SemanticKernel; - -/// -/// This middleware converts the incoming to before passing to agent. -/// And converts the reply message from to before returning to the caller. 
-/// -/// requirement for agent -/// - Input message type: where T is -/// - Reply message type: where T is -/// - (streaming) Reply message type: where T is -/// -/// This middleware supports the following message types: -/// - -/// - -/// - -/// -/// This middleware returns the following message types: -/// - -/// - -/// - -/// - (streaming) -/// -public class SemanticKernelChatMessageContentConnector : IMiddleware, IStreamingMiddleware -{ - public string? Name => nameof(SemanticKernelChatMessageContentConnector); - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - var messages = context.Messages; - - var chatMessageContents = ProcessMessage(messages, agent) - .Select(m => new MessageEnvelope(m)); - var reply = await agent.GenerateReplyAsync(chatMessageContents, context.Options, cancellationToken); - - return PostProcessMessage(reply); - } - - public async IAsyncEnumerable InvokeAsync(MiddlewareContext context, IStreamingAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var chatMessageContents = ProcessMessage(context.Messages, agent) - .Select(m => new MessageEnvelope(m)); - - await foreach (var reply in agent.GenerateStreamingReplyAsync(chatMessageContents, context.Options, cancellationToken)) - { - yield return PostProcessStreamingMessage(reply); - } - } - - private IMessage PostProcessMessage(IMessage input) - { - return input switch - { - IMessage messageEnvelope => PostProcessMessage(messageEnvelope), - _ => input, - }; - } - - private IMessage PostProcessStreamingMessage(IMessage input) - { - return input switch - { - IMessage streamingMessage => PostProcessMessage(streamingMessage), - IMessage msg => PostProcessMessage(msg), - _ => input, - }; - } - - private IMessage PostProcessMessage(IMessage messageEnvelope) - { - var chatMessageContent = messageEnvelope.Content; - var items = chatMessageContent.Items.Select(i => i switch - { - TextContent txt => new TextMessage(Role.Assistant, txt.Text!, messageEnvelope.From), - ImageContent img when img.Uri is Uri uri => new ImageMessage(Role.Assistant, uri.ToString(), from: messageEnvelope.From), - ImageContent img when img.Data is ReadOnlyMemory data => new ImageMessage(Role.Assistant, BinaryData.FromBytes(data), from: messageEnvelope.From), - _ => throw new InvalidOperationException("Unsupported content type"), - }); - - if (items.Count() == 1) - { - return items.First(); - } - else - { - return new MultiModalMessage(Role.Assistant, items, from: messageEnvelope.From); - } - } - - private IMessage PostProcessMessage(IMessage streamingMessage) - { - var chatMessageContent = streamingMessage.Content; - if (chatMessageContent.ChoiceIndex > 0) - { - throw new InvalidOperationException("Only one choice is supported in streaming response"); - } - return new TextMessageUpdate(Role.Assistant, chatMessageContent.Content, streamingMessage.From); - } - - private IEnumerable ProcessMessage(IEnumerable messages, IAgent agent) - { - return messages.SelectMany(m => - { - if (m is IMessage chatMessageContent) - { - return [chatMessageContent.Content]; - } - if (m.From == agent.Name) - { - return ProcessMessageForSelf(m); - } - else - { - return ProcessMessageForOthers(m); - } - }); - } - - private IEnumerable ProcessMessageForSelf(IMessage message) - { - return message switch - { - TextMessage textMessage => ProcessMessageForSelf(textMessage), - MultiModalMessage multiModalMessage => ProcessMessageForSelf(multiModalMessage), -#pragma 
warning disable CS0618 // deprecated - Message m => ProcessMessageForSelf(m), -#pragma warning restore CS0618 // deprecated - _ => throw new System.NotImplementedException(), - }; - } - - private IEnumerable ProcessMessageForOthers(IMessage message) - { - return message switch - { - TextMessage textMessage => ProcessMessageForOthers(textMessage), - MultiModalMessage multiModalMessage => ProcessMessageForOthers(multiModalMessage), - ImageMessage imageMessage => ProcessMessageForOthers(imageMessage), -#pragma warning disable CS0618 // deprecated - Message m => ProcessMessageForOthers(m), -#pragma warning restore CS0618 // deprecated - _ => throw new InvalidOperationException("unsupported message type, only support TextMessage, ImageMessage, MultiModalMessage and Message."), - }; - } - - private IEnumerable ProcessMessageForSelf(TextMessage message) - { - if (message.Role == Role.System) - { - return [new ChatMessageContent(AuthorRole.System, message.Content)]; - } - else - { - return [new ChatMessageContent(AuthorRole.Assistant, message.Content)]; - } - } - - - private IEnumerable ProcessMessageForOthers(TextMessage message) - { - if (message.Role == Role.System) - { - return [new ChatMessageContent(AuthorRole.System, message.Content)]; - } - else - { - return [new ChatMessageContent(AuthorRole.User, message.Content)]; - } - } - - private IEnumerable ProcessMessageForOthers(ImageMessage message) - { - var collectionItems = new ChatMessageContentItemCollection(); - collectionItems.Add(new ImageContent(new Uri(message.Url ?? message.BuildDataUri()))); - return [new ChatMessageContent(AuthorRole.User, collectionItems)]; - } - - private IEnumerable ProcessMessageForSelf(MultiModalMessage message) - { - throw new System.InvalidOperationException("MultiModalMessage is not supported in the semantic kernel if it's from self."); - } - - private IEnumerable ProcessMessageForOthers(MultiModalMessage message) - { - var collections = new ChatMessageContentItemCollection(); - foreach (var item in message.Content) - { - if (item is TextMessage textContent) - { - collections.Add(new TextContent(textContent.Content)); - } - else if (item is ImageMessage imageContent) - { - collections.Add(new ImageContent(new Uri(imageContent.Url ?? 
imageContent.BuildDataUri()))); - } - else - { - throw new InvalidOperationException($"Unsupported message type: {item.GetType().Name}"); - } - } - return [new ChatMessageContent(AuthorRole.User, collections)]; - } - - [Obsolete("This method is deprecated, please use the specific method instead.")] - private IEnumerable ProcessMessageForSelf(Message message) - { - if (message.Role == Role.System) - { - return [new ChatMessageContent(AuthorRole.System, message.Content)]; - } - else if (message.Content is string && message.FunctionName is null && message.FunctionArguments is null) - { - return [new ChatMessageContent(AuthorRole.Assistant, message.Content)]; - } - else if (message.Content is null && message.FunctionName is not null && message.FunctionArguments is not null) - { - throw new System.InvalidOperationException("Function call is not supported in the semantic kernel if it's from self."); - } - else - { - throw new System.InvalidOperationException("Unsupported message type"); - } - } - - [Obsolete("This method is deprecated, please use the specific method instead.")] - private IEnumerable ProcessMessageForOthers(Message message) - { - if (message.Role == Role.System) - { - return [new ChatMessageContent(AuthorRole.System, message.Content)]; - } - else if (message.Content is string && message.FunctionName is null && message.FunctionArguments is null) - { - return [new ChatMessageContent(AuthorRole.User, message.Content)]; - } - else if (message.Content is null && message.FunctionName is not null && message.FunctionArguments is not null) - { - throw new System.InvalidOperationException("Function call is not supported in the semantic kernel if it's from others."); - } - else - { - throw new System.InvalidOperationException("Unsupported message type"); - } - } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs deleted file mode 100644 index 058484deb5..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs +++ /dev/null @@ -1,127 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SemanticKernelAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace AutoGen.SemanticKernel; - -/// -/// Semantic Kernel Agent -/// Income message could be one of the following type: -/// -/// where T is -/// -/// -/// Return message could be one of the following type: -/// -/// where T is -/// (streaming) where T is -/// -/// -/// To support more AutoGen built-in , register with . -/// -public class SemanticKernelAgent : IStreamingAgent -{ - private readonly Kernel _kernel; - private readonly string _systemMessage; - private readonly PromptExecutionSettings? _settings; - - public SemanticKernelAgent( - Kernel kernel, - string name, - string systemMessage = "You are a helpful AI assistant", - PromptExecutionSettings? 
settings = null) - { - _kernel = kernel; - this.Name = name; - _systemMessage = systemMessage; - _settings = settings; - } - - public string Name { get; } - - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - var chatHistory = BuildChatHistory(messages); - var option = BuildOption(options); - var chatService = _kernel.GetRequiredService(); - - var reply = await chatService.GetChatMessageContentsAsync(chatHistory, option, _kernel, cancellationToken); - - if (reply.Count > 1) - { - throw new InvalidOperationException("ResultsPerPrompt greater than 1 is not supported in this semantic kernel agent"); - } - - return new MessageEnvelope(reply.First(), from: this.Name); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var chatHistory = BuildChatHistory(messages); - var option = BuildOption(options); - var chatService = _kernel.GetRequiredService(); - var response = chatService.GetStreamingChatMessageContentsAsync(chatHistory, option, _kernel, cancellationToken); - - await foreach (var content in response) - { - if (content.ChoiceIndex > 0) - { - throw new InvalidOperationException("Only one choice is supported in streaming response"); - } - - yield return new MessageEnvelope(content, from: this.Name); - } - } - - private ChatHistory BuildChatHistory(IEnumerable messages) - { - var chatMessageContents = ProcessMessage(messages); - // if there's no system message in chatMessageContents, add one to the beginning - if (!chatMessageContents.Any(c => c.Role == AuthorRole.System)) - { - chatMessageContents = new[] { new ChatMessageContent(AuthorRole.System, _systemMessage) }.Concat(chatMessageContents); - } - - return new ChatHistory(chatMessageContents); - } - - private PromptExecutionSettings BuildOption(GenerateReplyOptions? options) - { - return _settings ?? new OpenAIPromptExecutionSettings - { - Temperature = options?.Temperature ?? 0.7f, - MaxTokens = options?.MaxToken ?? 1024, - StopSequences = options?.StopSequence, - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions, - ResultsPerPrompt = 1, - }; - } - - private IEnumerable ProcessMessage(IEnumerable messages) - { - return messages.Select(m => m switch - { - IMessage cmc => cmc.Content, - _ => throw new ArgumentException("Invalid message type") - }); - } -} diff --git a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs deleted file mode 100644 index f983e40417..0000000000 --- a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs +++ /dev/null @@ -1,57 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// SemanticKernelChatCompletionAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace AutoGen.SemanticKernel; - -public class SemanticKernelChatCompletionAgent : IAgent -{ - public string Name { get; } - private readonly ChatCompletionAgent _chatCompletionAgent; - - public SemanticKernelChatCompletionAgent(ChatCompletionAgent chatCompletionAgent) - { - this.Name = chatCompletionAgent.Name ?? throw new ArgumentNullException(nameof(chatCompletionAgent.Name)); - this._chatCompletionAgent = chatCompletionAgent; - } - - public async Task GenerateReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - ChatMessageContent[] reply = await _chatCompletionAgent - .InvokeAsync(BuildChatHistory(messages), cancellationToken) - .ToArrayAsync(cancellationToken: cancellationToken); - - return reply.Length > 1 - ? throw new InvalidOperationException("ResultsPerPrompt greater than 1 is not supported in this semantic kernel agent") - : new MessageEnvelope(reply[0], from: this.Name); - } - - private ChatHistory BuildChatHistory(IEnumerable messages) - { - return new ChatHistory(ProcessMessage(messages)); - } - - private IEnumerable ProcessMessage(IEnumerable messages) - { - return messages.Select(m => m switch - { - IMessage cmc => cmc.Content, - _ => throw new ArgumentException("Invalid message type") - }); - } -} diff --git a/dotnet/src/AutoGen.SourceGenerator/AutoGen.SourceGenerator.csproj b/dotnet/src/AutoGen.SourceGenerator/AutoGen.SourceGenerator.csproj deleted file mode 100644 index 37f344ed11..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/AutoGen.SourceGenerator.csproj +++ /dev/null @@ -1,64 +0,0 @@ -ο»Ώ - - - netstandard2.0 - false - - true - - 35954224-b94e-4024-b0ef-7ba7cf80c0d8 - $(GetTargetPathDependsOn);GetDependencyTargetPaths - false - $(NoWarn);NU5128 - $(DefineConstants);LAUNCH_DEBUGGER - - - - - - - AutoGen.SourceGenerator - Source generator for AutoGen. This package provides type-safe function call to AutoGen agents. - - - - - - - - - - - - - - - - - - - - - - - TextTemplatingFilePreprocessor - FunctionCallTemplate.cs - - - - - - - - - - - - - - True - True - FunctionCallTemplate.tt - - - diff --git a/dotnet/src/AutoGen.SourceGenerator/DocumentCommentExtension.cs b/dotnet/src/AutoGen.SourceGenerator/DocumentCommentExtension.cs deleted file mode 100644 index 553d3a6702..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/DocumentCommentExtension.cs +++ /dev/null @@ -1,301 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// DocumentCommentExtension.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Xml.Linq; -using Microsoft.CodeAnalysis; -using Microsoft.CodeAnalysis.CSharp; -using Microsoft.CodeAnalysis.CSharp.Syntax; - -// copyright: https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/StyleCop.Analyzers/StyleCop.Analyzers/Helpers/DocumentationCommentExtensions.cs#L17 -namespace AutoGen.SourceGenerator -{ - internal static class DocumentCommentExtension - { - public static bool IsMissingOrDefault(this SyntaxToken token) - { - return token.IsKind(SyntaxKind.None) - || token.IsMissing; - } - - public static string? GetParameterDescriptionFromDocumentationCommentTriviaSyntax(this DocumentationCommentTriviaSyntax documentationCommentTrivia, string parameterName) - { - var parameterElements = documentationCommentTrivia.Content.GetXmlElements("param"); - - var parameter = parameterElements.FirstOrDefault(element => - { - var xml = XElement.Parse(element.ToString()); - var nameAttribute = xml.Attribute("name"); - return nameAttribute != null && nameAttribute.Value == parameterName; - }); - - if (parameter is not null) - { - var xml = XElement.Parse(parameter.ToString()); - - return xml.Nodes().OfType().FirstOrDefault()?.Value; - } - - return null; - } - - public static string? GetNamespaceNameFromClassDeclarationSyntax(this ClassDeclarationSyntax classDeclaration) - { - return classDeclaration.Parent is NamespaceDeclarationSyntax namespaceDeclarationSyntax ? namespaceDeclarationSyntax.Name.ToString() - : classDeclaration.Parent is FileScopedNamespaceDeclarationSyntax fileScopedNamespaceDeclarationSyntax ? fileScopedNamespaceDeclarationSyntax.Name.ToString() - : null; - } - - public static DocumentationCommentTriviaSyntax? GetDocumentationCommentTriviaSyntax(this SyntaxNode node) - { - if (node == null) - { - return null; - } - - foreach (var leadingTrivia in node.GetLeadingTrivia()) - { - if (leadingTrivia.GetStructure() is DocumentationCommentTriviaSyntax structure) - { - return structure; - } - } - - return null; - } - - public static XmlNodeSyntax GetFirstXmlElement(this SyntaxList content, string elementName) - { - return content.GetXmlElements(elementName).FirstOrDefault(); - } - - public static IEnumerable GetXmlElements(this SyntaxList content, string elementName) - { - foreach (XmlNodeSyntax syntax in content) - { - if (syntax is XmlEmptyElementSyntax emptyElement) - { - if (string.Equals(elementName, emptyElement.Name.ToString(), StringComparison.Ordinal)) - { - yield return emptyElement; - } - - continue; - } - - if (syntax is XmlElementSyntax elementSyntax) - { - if (string.Equals(elementName, elementSyntax.StartTag?.Name?.ToString(), StringComparison.Ordinal)) - { - yield return elementSyntax; - } - - continue; - } - } - } - - public static T ReplaceExteriorTrivia(this T node, SyntaxTrivia trivia) - where T : XmlNodeSyntax - { - // Make sure to include a space after the '///' characters. 
- SyntaxTrivia triviaWithSpace = SyntaxFactory.DocumentationCommentExterior(trivia.ToString() + " "); - - return node.ReplaceTrivia( - node.DescendantTrivia(descendIntoTrivia: true).Where(i => i.IsKind(SyntaxKind.DocumentationCommentExteriorTrivia)), - (originalTrivia, rewrittenTrivia) => SelectExteriorTrivia(rewrittenTrivia, trivia, triviaWithSpace)); - } - - public static SyntaxList WithoutFirstAndLastNewlines(this SyntaxList summaryContent) - { - if (summaryContent.Count == 0) - { - return summaryContent; - } - - if (!(summaryContent[0] is XmlTextSyntax firstSyntax)) - { - return summaryContent; - } - - if (!(summaryContent[summaryContent.Count - 1] is XmlTextSyntax lastSyntax)) - { - return summaryContent; - } - - SyntaxTokenList firstSyntaxTokens = firstSyntax.TextTokens; - - int removeFromStart; - if (IsXmlNewLine(firstSyntaxTokens[0])) - { - removeFromStart = 1; - } - else - { - if (!IsXmlWhitespace(firstSyntaxTokens[0])) - { - return summaryContent; - } - - if (!IsXmlNewLine(firstSyntaxTokens[1])) - { - return summaryContent; - } - - removeFromStart = 2; - } - - SyntaxTokenList lastSyntaxTokens = lastSyntax.TextTokens; - - int removeFromEnd; - if (IsXmlNewLine(lastSyntaxTokens[lastSyntaxTokens.Count - 1])) - { - removeFromEnd = 1; - } - else - { - if (!IsXmlWhitespace(lastSyntaxTokens[lastSyntaxTokens.Count - 1])) - { - return summaryContent; - } - - if (!IsXmlNewLine(lastSyntaxTokens[lastSyntaxTokens.Count - 2])) - { - return summaryContent; - } - - removeFromEnd = 2; - } - - for (int i = 0; i < removeFromStart; i++) - { - firstSyntaxTokens = firstSyntaxTokens.RemoveAt(0); - } - - if (firstSyntax == lastSyntax) - { - lastSyntaxTokens = firstSyntaxTokens; - } - - for (int i = 0; i < removeFromEnd; i++) - { - if (!lastSyntaxTokens.Any()) - { - break; - } - - lastSyntaxTokens = lastSyntaxTokens.RemoveAt(lastSyntaxTokens.Count - 1); - } - - summaryContent = summaryContent.RemoveAt(summaryContent.Count - 1); - if (lastSyntaxTokens.Count != 0) - { - summaryContent = summaryContent.Add(lastSyntax.WithTextTokens(lastSyntaxTokens)); - } - - if (firstSyntax != lastSyntax) - { - summaryContent = summaryContent.RemoveAt(0); - if (firstSyntaxTokens.Count != 0) - { - summaryContent = summaryContent.Insert(0, firstSyntax.WithTextTokens(firstSyntaxTokens)); - } - } - - if (summaryContent.Count > 0) - { - // Make sure to remove the leading trivia - summaryContent = summaryContent.Replace(summaryContent[0], summaryContent[0].WithLeadingTrivia()); - - // Remove leading spaces (between the start tag and the start of the paragraph content) - if (summaryContent[0] is XmlTextSyntax firstTextSyntax && firstTextSyntax.TextTokens.Count > 0) - { - SyntaxToken firstTextToken = firstTextSyntax.TextTokens[0]; - string firstTokenText = firstTextToken.Text; - string trimmed = firstTokenText.TrimStart(); - if (trimmed != firstTokenText) - { - SyntaxToken newFirstToken = SyntaxFactory.Token( - firstTextToken.LeadingTrivia, - firstTextToken.Kind(), - trimmed, - firstTextToken.ValueText.TrimStart(), - firstTextToken.TrailingTrivia); - - summaryContent = summaryContent.Replace(firstTextSyntax, firstTextSyntax.ReplaceToken(firstTextToken, newFirstToken)); - } - } - } - - return summaryContent; - } - - public static bool IsXmlNewLine(this SyntaxToken node) - { - return node.IsKind(SyntaxKind.XmlTextLiteralNewLineToken); - } - - public static bool IsXmlWhitespace(this SyntaxToken node) - { - return node.IsKind(SyntaxKind.XmlTextLiteralToken) - && string.IsNullOrWhiteSpace(node.Text); - } - - /// - /// Adjust the leading 
and trailing trivia associated with - /// tokens to ensure the formatter properly indents the exterior trivia. - /// - /// The type of syntax node. - /// The syntax node to adjust tokens. - /// A equivalent to the input , adjusted by moving any - /// trailing trivia from tokens to be leading trivia of the - /// following token. - public static T AdjustDocumentationCommentNewLineTrivia(this T node) - where T : SyntaxNode - { - var tokensForAdjustment = - from token in node.DescendantTokens() - where token.IsKind(SyntaxKind.XmlTextLiteralNewLineToken) - where token.HasTrailingTrivia - let next = token.GetNextToken(includeZeroWidth: true, includeSkipped: true, includeDirectives: true, includeDocumentationComments: true) - where !next.IsMissingOrDefault() - select new KeyValuePair(token, next); - - Dictionary replacements = new Dictionary(); - foreach (var pair in tokensForAdjustment) - { - replacements[pair.Key] = pair.Key.WithTrailingTrivia(); - replacements[pair.Value] = pair.Value.WithLeadingTrivia(pair.Value.LeadingTrivia.InsertRange(0, pair.Key.TrailingTrivia)); - } - - return node.ReplaceTokens(replacements.Keys, (originalToken, rewrittenToken) => replacements[originalToken]); - } - - public static XmlNameSyntax? GetName(this XmlNodeSyntax element) - { - return (element as XmlElementSyntax)?.StartTag?.Name - ?? (element as XmlEmptyElementSyntax)?.Name; - } - - private static SyntaxTrivia SelectExteriorTrivia(SyntaxTrivia rewrittenTrivia, SyntaxTrivia trivia, SyntaxTrivia triviaWithSpace) - { - // if the trivia had a trailing space, make sure to preserve it - if (rewrittenTrivia.ToString().EndsWith(" ")) - { - return triviaWithSpace; - } - - // otherwise the space is part of the leading trivia of the following token, so don't add an extra one to - // the exterior trivia - return trivia; - } - } -} diff --git a/dotnet/src/AutoGen.SourceGenerator/FunctionCallGenerator.cs b/dotnet/src/AutoGen.SourceGenerator/FunctionCallGenerator.cs deleted file mode 100644 index c071a1ac55..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/FunctionCallGenerator.cs +++ /dev/null @@ -1,254 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// FunctionCallGenerator.cs - -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Xml.Linq; -using AutoGen.SourceGenerator.Template; -using Microsoft.CodeAnalysis; -using Microsoft.CodeAnalysis.CSharp; -using Microsoft.CodeAnalysis.CSharp.Syntax; -using Microsoft.CodeAnalysis.Text; -using Newtonsoft.Json; - -namespace AutoGen.SourceGenerator -{ - [Generator] - public partial class FunctionCallGenerator : IIncrementalGenerator - { - private const string FUNCTION_CALL_ATTRIBUTION = "AutoGen.Core.FunctionAttribute"; - - public void Initialize(IncrementalGeneratorInitializationContext context) - { -#if LAUNCH_DEBUGGER - if (!System.Diagnostics.Debugger.IsAttached) - { - System.Diagnostics.Debugger.Launch(); - } -#endif - var optionProvider = context.AnalyzerConfigOptionsProvider.Select((provider, ct) => - { - var generateFunctionDefinitionContract = provider.GlobalOptions.TryGetValue("build_property.EnableContract", out var value) && value?.ToLowerInvariant() == "true"; - - return generateFunctionDefinitionContract; - }); - // step 1 - // filter syntax tree and search syntax node that satisfied the following conditions - // - is partial class - var partialClassSyntaxProvider = context.SyntaxProvider.CreateSyntaxProvider( - (node, ct) => - { - return node is ClassDeclarationSyntax classDeclarationSyntax && classDeclarationSyntax.Modifiers.Any(SyntaxKind.PartialKeyword); - }, - (ctx, ct) => - { - // first check if any method of the class has FunctionAttribution attribute - // if not, then return null - var filePath = ctx.Node.SyntaxTree.FilePath; - var fileName = Path.GetFileNameWithoutExtension(filePath); - - - var classDeclarationSyntax = ctx.Node as ClassDeclarationSyntax; - var nameSpace = classDeclarationSyntax?.Parent as NamespaceDeclarationSyntax; - var fullClassName = $"{nameSpace?.Name}.{classDeclarationSyntax!.Identifier}"; - if (classDeclarationSyntax == null) - { - return null; - } - - if (!classDeclarationSyntax.Members.Any(member => member.AttributeLists.Any(attributeList => attributeList.Attributes.Any(attribute => - { - return ctx.SemanticModel.GetSymbolInfo(attribute).Symbol is IMethodSymbol methodSymbol && methodSymbol.ContainingType.ToDisplayString() == FUNCTION_CALL_ATTRIBUTION; - })))) - { - return null; - } - - // collect methods that has FunctionAttribution attribute - var methodDeclarationSyntaxes = classDeclarationSyntax.Members.Where(member => member.AttributeLists.Any(attributeList => attributeList.Attributes.Any(attribute => - { - return ctx.SemanticModel.GetSymbolInfo(attribute).Symbol is IMethodSymbol methodSymbol && methodSymbol.ContainingType.ToDisplayString() == FUNCTION_CALL_ATTRIBUTION; - }))) - .Select(member => member as MethodDeclarationSyntax) - .Where(method => method != null); - - var className = classDeclarationSyntax.Identifier.ToString(); - var namespaceName = classDeclarationSyntax.GetNamespaceNameFromClassDeclarationSyntax(); - var functionContracts = methodDeclarationSyntaxes.Select(method => CreateFunctionContract(method!, className, namespaceName)); - - return new PartialClassOutput(fullClassName, classDeclarationSyntax, functionContracts); - }) - .Where(node => node != null) - .Collect(); - - var aggregateProvider = optionProvider.Combine(partialClassSyntaxProvider); - // step 2 - context.RegisterSourceOutput(aggregateProvider, - (ctx, source) => - { - var groups = source.Right.GroupBy(item => item!.FullClassName); - foreach (var group in groups) - { - var functionContracts = group.SelectMany(item => 
item!.FunctionContracts).ToArray(); - var className = group.First()!.ClassDeclarationSyntax.Identifier.ToString(); - var namespaceName = group.First()!.ClassDeclarationSyntax.GetNamespaceNameFromClassDeclarationSyntax() ?? string.Empty; - var functionTT = new FunctionCallTemplate - { - NameSpace = namespaceName, - ClassName = className, - FunctionContracts = functionContracts.ToArray(), - }; - - var functionSource = functionTT.TransformText(); - var fileName = $"{className}.generated.cs"; - - ctx.AddSource(fileName, SourceText.From(functionSource, System.Text.Encoding.UTF8)); - File.WriteAllText(Path.Combine(Path.GetTempPath(), fileName), functionSource); - } - - if (source.Left) - { - var overallFunctionDefinition = source.Right.SelectMany(x => x!.FunctionContracts.Select(y => new { fullClassName = x.FullClassName, y = y })); - var overallFunctionDefinitionObject = overallFunctionDefinition.Select( - x => new - { - fullClassName = x.fullClassName, - functionDefinition = new - { - x.y.Name, - x.y.Description, - x.y.ReturnType, - Parameters = x.y.Parameters.Select(y => new - { - y.Name, - y.Description, - y.JsonType, - y.JsonItemType, - y.Type, - y.IsOptional, - y.DefaultValue, - }), - }, - }); - - var json = JsonConvert.SerializeObject(overallFunctionDefinitionObject, formatting: Formatting.Indented); - // wrap json inside csharp block, as SG doesn't support generating non-source file - json = $@"/* wrap json inside csharp block, as SG doesn't support generating non-source file -{json} -*/"; - ctx.AddSource("FunctionDefinition.json", SourceText.From(json, System.Text.Encoding.UTF8)); - } - }); - } - - private class PartialClassOutput - { - public PartialClassOutput(string fullClassName, ClassDeclarationSyntax classDeclarationSyntax, IEnumerable functionContracts) - { - FullClassName = fullClassName; - ClassDeclarationSyntax = classDeclarationSyntax; - FunctionContracts = functionContracts; - } - - public string FullClassName { get; } - - public ClassDeclarationSyntax ClassDeclarationSyntax { get; } - - public IEnumerable FunctionContracts { get; } - } - - private SourceGeneratorFunctionContract CreateFunctionContract(MethodDeclarationSyntax method, string? className, string? namespaceName) - { - // get function_call attribute - var functionCallAttribute = method.AttributeLists.SelectMany(attributeList => attributeList.Attributes) - .FirstOrDefault(attribute => attribute.Name.ToString() == FUNCTION_CALL_ATTRIBUTION); - // get document string if exist - var documentationCommentTrivia = method.GetDocumentationCommentTriviaSyntax(); - - var functionName = method.Identifier.ToString(); - var functionDescription = functionCallAttribute?.ArgumentList?.Arguments.FirstOrDefault(argument => argument.NameEquals?.Name.ToString() == "Description")?.Expression.ToString() ?? string.Empty; - - if (string.IsNullOrEmpty(functionDescription)) - { - // if functionDescription is empty, then try to get it from documentationCommentTrivia - // firstly, try getting from tag - var summary = documentationCommentTrivia?.Content.GetFirstXmlElement("summary"); - if (summary is not null && XElement.Parse(summary.ToString()) is XElement element) - { - functionDescription = element.Nodes().OfType().FirstOrDefault()?.Value; - - // remove [space...][//|///][space...] 
from functionDescription - // replace [^\S\r\n]+[\/]+\s* with empty string - functionDescription = System.Text.RegularExpressions.Regex.Replace(functionDescription, @"[^\S\r\n]+\/[\/]+\s*", string.Empty); - } - else - { - // if tag is not exist, then simply use the entire leading trivia as functionDescription - functionDescription = method.GetLeadingTrivia().ToString(); - - // remove [space...][//|///][space...] from functionDescription - // replace [^\S\r\n]+[\/]+\s* with empty string - functionDescription = System.Text.RegularExpressions.Regex.Replace(functionDescription, @"[^\S\r\n]+\/[\/]+\s*", string.Empty); - } - } - - // get parameters - var parameters = method.ParameterList.Parameters.Select(parameter => - { - var description = $"{parameter.Identifier}. type is {parameter.Type}"; - - // try to get parameter description from documentationCommentTrivia - var parameterDocumentationComment = documentationCommentTrivia?.GetParameterDescriptionFromDocumentationCommentTriviaSyntax(parameter.Identifier.ToString()); - if (parameterDocumentationComment is not null) - { - description = parameterDocumentationComment.ToString(); - // remove [space...][//|///][space...] from functionDescription - // replace [^\S\r\n]+[\/]+\s* with empty string - description = System.Text.RegularExpressions.Regex.Replace(description, @"[^\S\r\n]+\/[\/]+\s*", string.Empty); - } - var jsonItemType = parameter.Type!.ToString().EndsWith("[]") ? parameter.Type!.ToString().Substring(0, parameter.Type!.ToString().Length - 2) : null; - return new SourceGeneratorParameterContract - { - Name = parameter.Identifier.ToString(), - JsonType = parameter.Type!.ToString() switch - { - "string" => "string", - "string[]" => "array", - "System.Int32" or "int" => "integer", - "System.Int64" or "long" => "integer", - "System.Single" or "float" => "number", - "System.Double" or "double" => "number", - "System.Boolean" or "bool" => "boolean", - "System.DateTime" => "string", - "System.Guid" => "string", - "System.Object" => "object", - _ => "object", - }, - JsonItemType = jsonItemType, - Type = parameter.Type!.ToString(), - Description = description, - IsOptional = parameter.Default != null, - // if Default is null or "null", then DefaultValue is null - DefaultValue = parameter.Default?.ToString() == "null" ? null : parameter.Default?.Value.ToString(), - }; - }); - - return new SourceGeneratorFunctionContract - { - ClassName = className, - Namespace = namespaceName, - Name = functionName, - Description = functionDescription?.Trim() ?? functionName, - Parameters = parameters.ToArray(), - ReturnType = method.ReturnType.ToString(), - }; - } - } -} diff --git a/dotnet/src/AutoGen.SourceGenerator/FunctionExtension.cs b/dotnet/src/AutoGen.SourceGenerator/FunctionExtension.cs deleted file mode 100644 index 5e0aeb0f70..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/FunctionExtension.cs +++ /dev/null @@ -1,38 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// FunctionExtension.cs - -using AutoGen.SourceGenerator; - -internal static class FunctionExtension -{ - public static string GetFunctionName(this SourceGeneratorFunctionContract function) - { - return function.Name ?? string.Empty; - } - - public static string GetFunctionSchemaClassName(this SourceGeneratorFunctionContract function) - { - return $"{function.GetFunctionName()}Schema"; - } - - public static string GetFunctionDefinitionName(this SourceGeneratorFunctionContract function) - { - return $"{function.GetFunctionName()}Function"; - } - - public static string GetFunctionWrapperName(this SourceGeneratorFunctionContract function) - { - return $"{function.GetFunctionName()}Wrapper"; - } - - public static string GetFunctionContractName(this SourceGeneratorFunctionContract function) - { - return $"{function.GetFunctionName()}FunctionContract"; - } -} diff --git a/dotnet/src/AutoGen.SourceGenerator/README.md b/dotnet/src/AutoGen.SourceGenerator/README.md deleted file mode 100644 index a40fbe6040..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/README.md +++ /dev/null @@ -1,113 +0,0 @@ -### AutoGen.SourceGenerator - -This package carries a source generator that adds support for type-safe function definition generation. Simply mark a method with `Function` attribute, and the source generator will generate a function definition and a function call wrapper for you. - -### Get start - -First, add the following to your project file and set `GenerateDocumentationFile` property to true - -```xml - - - true - -``` -```xml - - - -``` - -> Nightly Build feed: https://devdiv.pkgs.visualstudio.com/DevDiv/_packaging/AutoGen/nuget/v3/index.json - -Then, for the methods you want to generate function definition and function call wrapper, mark them with `Function` attribute: - -> Note: For the best of performance, try using primitive types for the parameters and return type. - -```csharp -// file: MyFunctions.cs - -using AutoGen; - -// a partial class is required -// and the class must be public -public partial class MyFunctions -{ - /// - /// Add two numbers. - /// - /// The first number. - /// The second number. - [Function] - public Task AddAsync(int a, int b) - { - return Task.FromResult($"{a} + {b} = {a + b}"); - } -} -``` - -The source generator will generate the following code based on the method signature and documentation. It helps you save the effort of writing function definition and keep it up to date with the actual method signature. - -```csharp -// file: MyFunctions.generated.cs -public partial class MyFunctions -{ - private class AddAsyncSchema - { - public int a {get; set;} - public int b {get; set;} - } - - public Task AddAsyncWrapper(string arguments) - { - var schema = JsonSerializer.Deserialize( - arguments, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }); - return AddAsync(schema.a, schema.b); - } - - public FunctionDefinition AddAsyncFunction - { - get => new FunctionDefinition - { - Name = @"AddAsync", - Description = """ -Add two numbers. 
-""", - Parameters = BinaryData.FromObjectAsJson(new - { - Type = "object", - Properties = new - { - a = new - { - Type = @"number", - Description = @"The first number.", - }, - b = new - { - Type = @"number", - Description = @"The second number.", - }, - }, - Required = new [] - { - "a", - "b", - }, - }, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }) - }; - } -} -``` - -For more examples, please check out the following project -- [AutoGen.BasicSamples](../sample/AutoGen.BasicSamples/) -- [AutoGen.SourceGenerator.Tests](../../test/AutoGen.SourceGenerator.Tests/) diff --git a/dotnet/src/AutoGen.SourceGenerator/SourceGeneratorFunctionContract.cs b/dotnet/src/AutoGen.SourceGenerator/SourceGeneratorFunctionContract.cs deleted file mode 100644 index 25b3ee9adb..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/SourceGeneratorFunctionContract.cs +++ /dev/null @@ -1,46 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SourceGeneratorFunctionContract.cs - -namespace AutoGen.SourceGenerator -{ - internal class SourceGeneratorFunctionContract - { - public string? Namespace { get; set; } - - public string? ClassName { get; set; } - - public string? Name { get; set; } - - public string? Description { get; set; } - - public string? ReturnDescription { get; set; } - - public SourceGeneratorParameterContract[]? Parameters { get; set; } - - public string? ReturnType { get; set; } - } - - internal class SourceGeneratorParameterContract - { - public string? Name { get; set; } - - public string? Description { get; set; } - - public string? JsonType { get; set; } - - public string? JsonItemType { get; set; } - - public string? Type { get; set; } - - public bool IsOptional { get; set; } - - public string? DefaultValue { get; set; } - - } -} diff --git a/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.cs b/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.cs deleted file mode 100644 index b90d78be3f..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.cs +++ /dev/null @@ -1,442 +0,0 @@ -ο»Ώ// ------------------------------------------------------------------------------ -// -// This code was generated by a tool. -// Runtime Version: 17.0.0.0 -// -// Changes to this file may cause incorrect behavior and will be lost if -// the code is regenerated. -// -// ------------------------------------------------------------------------------ -namespace AutoGen.SourceGenerator.Template -{ - using System.Linq; - using System.Collections.Generic; - using Microsoft.CodeAnalysis; - using System; - - /// - /// Class to produce the template output - /// - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.TextTemplating", "17.0.0.0")] - internal partial class FunctionCallTemplate : FunctionCallTemplateBase - { - /// - /// Create the template output - /// - public virtual string TransformText() - { - this.Write("ο»Ώ"); - this.Write(@"//---------------------- -// -// This code was generated by a tool. 
-// -//---------------------- -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading.Tasks; -using System; -using AutoGen.Core; - -"); -if (!String.IsNullOrEmpty(NameSpace)) { - this.Write("namespace "); - this.Write(this.ToStringHelper.ToStringWithCulture(NameSpace)); - this.Write("\r\n{\r\n"); -} - this.Write(" public partial class "); - this.Write(this.ToStringHelper.ToStringWithCulture(ClassName)); - this.Write("\r\n {\r\n"); -foreach (var functionContract in FunctionContracts) { - this.Write("\r\n private class "); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.GetFunctionSchemaClassName())); - this.Write("\r\n {\r\n"); -foreach (var parameter in functionContract.Parameters) { -if (parameter.IsOptional) { - this.Write(" [JsonPropertyName(@\""); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Name)); - this.Write("\")]\r\n\t\t\tpublic "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Type)); - this.Write(" "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Name)); - this.Write(" {get; set;} = "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.DefaultValue)); - this.Write(";\r\n"); -} else { - this.Write(" [JsonPropertyName(@\""); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Name)); - this.Write("\")]\r\n\t\t\tpublic "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Type)); - this.Write(" "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Name)); - this.Write(" {get; set;}\r\n"); -} -} - this.Write(" }\r\n\r\n public "); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.ReturnType)); - this.Write(" "); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.GetFunctionWrapperName())); - this.Write("(string arguments)\r\n {\r\n var schema = JsonSerializer.Deserializ" + - "e<"); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.GetFunctionSchemaClassName())); - this.Write(">(\r\n arguments, \r\n new JsonSerializerOptions\r\n " + - " {\r\n PropertyNamingPolicy = JsonNamingPolicy.CamelC" + - "ase,\r\n });\r\n"); - var argumentLists = string.Join(", ", functionContract.Parameters.Select(p => $"schema.{p.Name}")); - this.Write("\r\n return "); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.Name)); - this.Write("("); - this.Write(this.ToStringHelper.ToStringWithCulture(argumentLists)); - this.Write(");\r\n }\r\n\r\n public FunctionContract "); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.GetFunctionContractName())); - this.Write("\r\n {\r\n get => new FunctionContract\r\n {\r\n"); -if (functionContract.Namespace != null) { - this.Write(" Namespace = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.Namespace)); - this.Write("\",\r\n"); -} -if (functionContract.ClassName != null) { - this.Write(" ClassName = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.ClassName)); - this.Write("\",\r\n"); -} -if (functionContract.Name != null) { - this.Write(" Name = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.Name)); - this.Write("\",\r\n"); -} -if (functionContract.Description != null) { - this.Write(" Description = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.Description.Replace("\"", "\"\""))); - this.Write("\",\r\n"); -} -if (functionContract.ReturnType != null) { - this.Write(" ReturnType = typeof("); - 
this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.ReturnType)); - this.Write("),\r\n"); -} -if (functionContract.ReturnDescription != null) { - this.Write(" ReturnDescription = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(functionContract.ReturnDescription)); - this.Write("\",\r\n"); -} -if (functionContract.Parameters != null) { - this.Write(" Parameters = new global::AutoGen.Core.FunctionParameterContract[]" + - "\r\n {\r\n"); -foreach (var parameter in functionContract.Parameters) { - this.Write(" new FunctionParameterContract\r\n {\r\n"); -if (parameter.Name != null) { - this.Write(" Name = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Name)); - this.Write("\",\r\n"); -} -if (parameter.Description != null) { - this.Write(" Description = @\""); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Description.Replace("\"", "\"\""))); - this.Write("\",\r\n"); -} -if (parameter.Type != null) { - this.Write(" ParameterType = typeof("); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.Type)); - this.Write("),\r\n"); -} - this.Write(" IsRequired = "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.IsOptional ? "false" : "true")); - this.Write(",\r\n"); -if (parameter.DefaultValue != null) { - this.Write(" DefaultValue = "); - this.Write(this.ToStringHelper.ToStringWithCulture(parameter.DefaultValue)); - this.Write(",\r\n"); -} - this.Write(" },\r\n"); -} - this.Write(" },\r\n"); -} - this.Write(" };\r\n }\r\n"); -} - this.Write(" }\r\n"); -if (!String.IsNullOrEmpty(NameSpace)) { - this.Write("}\r\n"); -} - this.Write("\r\n"); - return this.GenerationEnvironment.ToString(); - } - -public string NameSpace {get; set;} -public string ClassName {get; set;} -public IEnumerable FunctionContracts {get; set;} -public bool IsStatic {get; set;} = false; - - } - #region Base class - /// - /// Base class for this transformation - /// - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.TextTemplating", "17.0.0.0")] - internal class FunctionCallTemplateBase - { - #region Fields - private global::System.Text.StringBuilder generationEnvironmentField; - private global::System.CodeDom.Compiler.CompilerErrorCollection errorsField; - private global::System.Collections.Generic.List indentLengthsField; - private string currentIndentField = ""; - private bool endsWithNewline; - private global::System.Collections.Generic.IDictionary sessionField; - #endregion - #region Properties - /// - /// The string builder that generation-time code is using to assemble generated output - /// - public System.Text.StringBuilder GenerationEnvironment - { - get - { - if ((this.generationEnvironmentField == null)) - { - this.generationEnvironmentField = new global::System.Text.StringBuilder(); - } - return this.generationEnvironmentField; - } - set - { - this.generationEnvironmentField = value; - } - } - /// - /// The error collection for the generation process - /// - public System.CodeDom.Compiler.CompilerErrorCollection Errors - { - get - { - if ((this.errorsField == null)) - { - this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection(); - } - return this.errorsField; - } - } - /// - /// A list of the lengths of each indent that was added with PushIndent - /// - private System.Collections.Generic.List indentLengths - { - get - { - if ((this.indentLengthsField == null)) - { - this.indentLengthsField = new global::System.Collections.Generic.List(); - } - return this.indentLengthsField; 
- } - } - /// - /// Gets the current indent we use when adding lines to the output - /// - public string CurrentIndent - { - get - { - return this.currentIndentField; - } - } - /// - /// Current transformation session - /// - public virtual global::System.Collections.Generic.IDictionary Session - { - get - { - return this.sessionField; - } - set - { - this.sessionField = value; - } - } - #endregion - #region Transform-time helpers - /// - /// Write text directly into the generated output - /// - public void Write(string textToAppend) - { - if (string.IsNullOrEmpty(textToAppend)) - { - return; - } - // If we're starting off, or if the previous text ended with a newline, - // we have to append the current indent first. - if (((this.GenerationEnvironment.Length == 0) - || this.endsWithNewline)) - { - this.GenerationEnvironment.Append(this.currentIndentField); - this.endsWithNewline = false; - } - // Check if the current text ends with a newline - if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture)) - { - this.endsWithNewline = true; - } - // This is an optimization. If the current indent is "", then we don't have to do any - // of the more complex stuff further down. - if ((this.currentIndentField.Length == 0)) - { - this.GenerationEnvironment.Append(textToAppend); - return; - } - // Everywhere there is a newline in the text, add an indent after it - textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField)); - // If the text ends with a newline, then we should strip off the indent added at the very end - // because the appropriate indent will be added when the next time Write() is called - if (this.endsWithNewline) - { - this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length)); - } - else - { - this.GenerationEnvironment.Append(textToAppend); - } - } - /// - /// Write text directly into the generated output - /// - public void WriteLine(string textToAppend) - { - this.Write(textToAppend); - this.GenerationEnvironment.AppendLine(); - this.endsWithNewline = true; - } - /// - /// Write formatted text directly into the generated output - /// - public void Write(string format, params object[] args) - { - this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args)); - } - /// - /// Write formatted text directly into the generated output - /// - public void WriteLine(string format, params object[] args) - { - this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args)); - } - /// - /// Raise an error - /// - public void Error(string message) - { - System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError(); - error.ErrorText = message; - this.Errors.Add(error); - } - /// - /// Raise a warning - /// - public void Warning(string message) - { - System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError(); - error.ErrorText = message; - error.IsWarning = true; - this.Errors.Add(error); - } - /// - /// Increase the indent - /// - public void PushIndent(string indent) - { - if ((indent == null)) - { - throw new global::System.ArgumentNullException("indent"); - } - this.currentIndentField = (this.currentIndentField + indent); - this.indentLengths.Add(indent.Length); - } - /// - /// Remove the last indent that was added with PushIndent - /// - public string PopIndent() - { - 
string returnValue = ""; - if ((this.indentLengths.Count > 0)) - { - int indentLength = this.indentLengths[(this.indentLengths.Count - 1)]; - this.indentLengths.RemoveAt((this.indentLengths.Count - 1)); - if ((indentLength > 0)) - { - returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength)); - this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength)); - } - } - return returnValue; - } - /// - /// Remove any indentation - /// - public void ClearIndent() - { - this.indentLengths.Clear(); - this.currentIndentField = ""; - } - #endregion - #region ToString Helpers - /// - /// Utility class to produce culture-oriented representation of an object as a string. - /// - public class ToStringInstanceHelper - { - private System.IFormatProvider formatProviderField = global::System.Globalization.CultureInfo.InvariantCulture; - /// - /// Gets or sets format provider to be used by ToStringWithCulture method. - /// - public System.IFormatProvider FormatProvider - { - get - { - return this.formatProviderField ; - } - set - { - if ((value != null)) - { - this.formatProviderField = value; - } - } - } - /// - /// This is called from the compile/run appdomain to convert objects within an expression block to a string - /// - public string ToStringWithCulture(object objectToConvert) - { - if ((objectToConvert == null)) - { - throw new global::System.ArgumentNullException("objectToConvert"); - } - System.Type t = objectToConvert.GetType(); - System.Reflection.MethodInfo method = t.GetMethod("ToString", new System.Type[] { - typeof(System.IFormatProvider)}); - if ((method == null)) - { - return objectToConvert.ToString(); - } - else - { - return ((string)(method.Invoke(objectToConvert, new object[] { - this.formatProviderField }))); - } - } - } - private ToStringInstanceHelper toStringHelperField = new ToStringInstanceHelper(); - /// - /// Helper to produce culture-oriented representation of an object as a string - /// - public ToStringInstanceHelper ToStringHelper - { - get - { - return this.toStringHelperField; - } - } - #endregion - } - #endregion -} diff --git a/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.tt b/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.tt deleted file mode 100644 index e7ed476fde..0000000000 --- a/dotnet/src/AutoGen.SourceGenerator/Template/FunctionCallTemplate.tt +++ /dev/null @@ -1,109 +0,0 @@ -ο»Ώο»Ώ<#@ template language="C#" linePragmas="false" visibility = "internal" #> -<#@ assembly name="System.Core" #> -<#@ import namespace="System.Linq" #> -<#@ import namespace="System.Collections.Generic" #> -<#@ import namespace="Microsoft.CodeAnalysis" #> -//---------------------- -// -// This code was generated by a tool. 
-// -//---------------------- -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading.Tasks; -using System; -using AutoGen.Core; - -<#if (!String.IsNullOrEmpty(NameSpace)) {#> -namespace <#=NameSpace#> -{ -<#}#> - public partial class <#=ClassName#> - { -<#foreach (var functionContract in FunctionContracts) {#> - - private class <#=functionContract.GetFunctionSchemaClassName()#> - { -<#foreach (var parameter in functionContract.Parameters) {#> -<#if (parameter.IsOptional) {#> - [JsonPropertyName(@"<#=parameter.Name#>")] - public <#=parameter.Type#> <#=parameter.Name#> {get; set;} = <#=parameter.DefaultValue#>; -<#} else {#> - [JsonPropertyName(@"<#=parameter.Name#>")] - public <#=parameter.Type#> <#=parameter.Name#> {get; set;} -<#}#> -<#}#> - } - - public <#=functionContract.ReturnType#> <#=functionContract.GetFunctionWrapperName()#>(string arguments) - { - var schema = JsonSerializer.Deserialize<<#=functionContract.GetFunctionSchemaClassName()#>>( - arguments, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }); -<# var argumentLists = string.Join(", ", functionContract.Parameters.Select(p => $"schema.{p.Name}")); #> - - return <#=functionContract.Name#>(<#=argumentLists#>); - } - - public FunctionContract <#=functionContract.GetFunctionContractName()#> - { - get => new FunctionContract - { -<#if (functionContract.Namespace != null) {#> - Namespace = @"<#=functionContract.Namespace#>", -<#}#> -<#if (functionContract.ClassName != null) {#> - ClassName = @"<#=functionContract.ClassName#>", -<#}#> -<#if (functionContract.Name != null) {#> - Name = @"<#=functionContract.Name#>", -<#}#> -<#if (functionContract.Description != null) {#> - Description = @"<#=functionContract.Description.Replace("\"", "\"\"")#>", -<#}#> -<#if (functionContract.ReturnType != null) {#> - ReturnType = typeof(<#=functionContract.ReturnType#>), -<#}#> -<#if (functionContract.ReturnDescription != null) {#> - ReturnDescription = @"<#=functionContract.ReturnDescription#>", -<#}#> -<#if (functionContract.Parameters != null) {#> - Parameters = new global::AutoGen.Core.FunctionParameterContract[] - { -<#foreach (var parameter in functionContract.Parameters) {#> - new FunctionParameterContract - { -<#if (parameter.Name != null) {#> - Name = @"<#=parameter.Name#>", -<#}#> -<#if (parameter.Description != null) {#> - Description = @"<#= parameter.Description.Replace("\"", "\"\"") #>", -<#}#> -<#if (parameter.Type != null) {#> - ParameterType = typeof(<#=parameter.Type#>), -<#}#> - IsRequired = <#=parameter.IsOptional ? "false" : "true"#>, -<#if (parameter.DefaultValue != null) {#> - DefaultValue = <#=parameter.DefaultValue#>, -<#}#> - }, -<#}#> - }, -<#}#> - }; - } -<#}#> - } -<#if (!String.IsNullOrEmpty(NameSpace)) {#> -} -<#}#> - -<#+ -public string NameSpace {get; set;} -public string ClassName {get; set;} -public IEnumerable FunctionContracts {get; set;} -public bool IsStatic {get; set;} = false; -#> \ No newline at end of file diff --git a/dotnet/src/AutoGen.WebAPI/AutoGen.WebAPI.csproj b/dotnet/src/AutoGen.WebAPI/AutoGen.WebAPI.csproj deleted file mode 100644 index c5b7207647..0000000000 --- a/dotnet/src/AutoGen.WebAPI/AutoGen.WebAPI.csproj +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ - - - net6.0;net8.0 - true - $(NoWarn);CS1591;CS1573 - - - - - - - - AutoGen.WebAPI - - Turn an `AutoGen.Core.IAgent` into a RESTful API. 
- - - - - - - - - - - diff --git a/dotnet/src/AutoGen.WebAPI/Extension.cs b/dotnet/src/AutoGen.WebAPI/Extension.cs deleted file mode 100644 index 63ec99f283..0000000000 --- a/dotnet/src/AutoGen.WebAPI/Extension.cs +++ /dev/null @@ -1,30 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Extension.cs - -using AutoGen.Core; -using Microsoft.AspNetCore.Builder; - -namespace AutoGen.WebAPI; - -public static class Extension -{ - /// - /// Serve the agent as an OpenAI chat completion endpoint using . - /// If the request path is /v1/chat/completions and model name is the same as the agent name, - /// the request will be handled by the agent. - /// otherwise, the request will be passed to the next middleware. - /// - /// application builder - /// - public static IApplicationBuilder UseAgentAsOpenAIChatCompletionEndpoint(this IApplicationBuilder app, IAgent agent) - { - var middleware = new OpenAIChatCompletionMiddleware(agent); - return app.Use(middleware.InvokeAsync); - } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/Converter/OpenAIMessageConverter.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/Converter/OpenAIMessageConverter.cs deleted file mode 100644 index 8617a99e98..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/Converter/OpenAIMessageConverter.cs +++ /dev/null @@ -1,62 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIMessageConverter.cs - -using System; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIMessageConverter : JsonConverter -{ - public override OpenAIMessage Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - using JsonDocument document = JsonDocument.ParseValue(ref reader); - var root = document.RootElement; - var role = root.GetProperty("role").GetString(); - var contentDocument = root.GetProperty("content"); - var isContentDocumentString = contentDocument.ValueKind == JsonValueKind.String; - switch (role) - { - case "system": - return JsonSerializer.Deserialize(root.GetRawText()) ?? throw new JsonException(); - case "user" when isContentDocumentString: - return JsonSerializer.Deserialize(root.GetRawText()) ?? throw new JsonException(); - case "user" when !isContentDocumentString: - return JsonSerializer.Deserialize(root.GetRawText()) ?? throw new JsonException(); - case "assistant": - return JsonSerializer.Deserialize(root.GetRawText()) ?? throw new JsonException(); - case "tool": - return JsonSerializer.Deserialize(root.GetRawText()) ?? 
throw new JsonException(); - default: - throw new JsonException(); - } - } - - public override void Write(Utf8JsonWriter writer, OpenAIMessage value, JsonSerializerOptions options) - { - switch (value) - { - case OpenAISystemMessage systemMessage: - JsonSerializer.Serialize(writer, systemMessage, options); - break; - case OpenAIUserMessage userMessage: - JsonSerializer.Serialize(writer, userMessage, options); - break; - case OpenAIAssistantMessage assistantMessage: - JsonSerializer.Serialize(writer, assistantMessage, options); - break; - case OpenAIToolMessage toolMessage: - JsonSerializer.Serialize(writer, toolMessage, options); - break; - default: - throw new JsonException(); - } - } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIAssistantMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIAssistantMessage.cs deleted file mode 100644 index aa4ef21df5..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIAssistantMessage.cs +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIAssistantMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIAssistantMessage : OpenAIMessage -{ - [JsonPropertyName("role")] - public override string? Role { get; } = "assistant"; - - [JsonPropertyName("content")] - public string? Content { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("tool_calls")] - public OpenAIToolCallObject[]? ToolCalls { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletion.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletion.cs deleted file mode 100644 index f0aa4a0c30..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletion.cs +++ /dev/null @@ -1,36 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletion.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIChatCompletion -{ - [JsonPropertyName("id")] - public string? ID { get; set; } - - [JsonPropertyName("created")] - public long Created { get; set; } - - [JsonPropertyName("choices")] - public OpenAIChatCompletionChoice[]? Choices { get; set; } - - [JsonPropertyName("model")] - public string? Model { get; set; } - - [JsonPropertyName("system_fingerprint")] - public string? SystemFingerprint { get; set; } - - [JsonPropertyName("object")] - public string Object { get; set; } = "chat.completion"; - - [JsonPropertyName("usage")] - public OpenAIChatCompletionUsage? 
Usage { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionChoice.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionChoice.cs deleted file mode 100644 index 68aaa4043d..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionChoice.cs +++ /dev/null @@ -1,27 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionChoice.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIChatCompletionChoice -{ - [JsonPropertyName("finish_reason")] - public string? FinishReason { get; set; } - - [JsonPropertyName("index")] - public int Index { get; set; } - - [JsonPropertyName("message")] - public OpenAIChatCompletionMessage? Message { get; set; } - - [JsonPropertyName("delta")] - public OpenAIChatCompletionMessage? Delta { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionMessage.cs deleted file mode 100644 index 0dcebc34ec..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionMessage.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIChatCompletionMessage -{ - [JsonPropertyName("role")] - public string Role { get; } = "assistant"; - - [JsonPropertyName("content")] - public string? Content { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionOption.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionOption.cs deleted file mode 100644 index fa174027fa..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionOption.cs +++ /dev/null @@ -1,39 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionOption.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIChatCompletionOption -{ - [JsonPropertyName("messages")] - public OpenAIMessage[]? Messages { get; set; } - - [JsonPropertyName("model")] - public string? Model { get; set; } - - [JsonPropertyName("max_tokens")] - public int? 
MaxTokens { get; set; } - - [JsonPropertyName("temperature")] - public float Temperature { get; set; } = 1; - - /// - /// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message - /// - [JsonPropertyName("stream")] - public bool? Stream { get; set; } = false; - - [JsonPropertyName("stream_options")] - public OpenAIStreamOptions? StreamOptions { get; set; } - - [JsonPropertyName("stop")] - public string[]? Stop { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionUsage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionUsage.cs deleted file mode 100644 index e90a9089b3..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIChatCompletionUsage.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionUsage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIChatCompletionUsage -{ - [JsonPropertyName("completion_tokens")] - public int CompletionTokens { get; set; } - - [JsonPropertyName("prompt_tokens")] - public int PromptTokens { get; set; } - - [JsonPropertyName("total_tokens")] - public int TotalTokens { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIImageUrlObject.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIImageUrlObject.cs deleted file mode 100644 index 76f47f4d41..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIImageUrlObject.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIImageUrlObject.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIImageUrlObject -{ - [JsonPropertyName("url")] - public string? Url { get; set; } - - [JsonPropertyName("detail")] - public string? Detail { get; set; } = "auto"; -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIMessage.cs deleted file mode 100644 index 07c00c0a51..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIMessage.cs +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// OpenAIMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -[JsonConverter(typeof(OpenAIMessageConverter))] -internal abstract class OpenAIMessage -{ - [JsonPropertyName("role")] - public abstract string? Role { get; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIStreamOptions.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIStreamOptions.cs deleted file mode 100644 index b8020d5366..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIStreamOptions.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIStreamOptions.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIStreamOptions -{ - [JsonPropertyName("include_usage")] - public bool? IncludeUsage { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAISystemMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAISystemMessage.cs deleted file mode 100644 index ae4b8feff4..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAISystemMessage.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAISystemMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAISystemMessage : OpenAIMessage -{ - [JsonPropertyName("role")] - public override string? Role { get; } = "system"; - - [JsonPropertyName("content")] - public string? Content { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolCallObject.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolCallObject.cs deleted file mode 100644 index 744d0895b7..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolCallObject.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIToolCallObject.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIToolCallObject -{ - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("arguments")] - public string? 
Arguments { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolMessage.cs deleted file mode 100644 index fb31f68b7a..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIToolMessage.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIToolMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIToolMessage : OpenAIMessage -{ - [JsonPropertyName("role")] - public override string? Role { get; } = "tool"; - - [JsonPropertyName("content")] - public string? Content { get; set; } - - [JsonPropertyName("tool_call_id")] - public string? ToolCallId { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserImageContent.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserImageContent.cs deleted file mode 100644 index 825d87f221..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserImageContent.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIUserImageContent.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIUserImageContent : OpenAIUserMessageItem -{ - [JsonPropertyName("type")] - public override string MessageType { get; } = "image"; - - [JsonPropertyName("image_url")] - public string? Url { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessage.cs deleted file mode 100644 index 816904fcd4..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessage.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIUserMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIUserMessage : OpenAIMessage -{ - [JsonPropertyName("role")] - public override string? Role { get; } = "user"; - - [JsonPropertyName("content")] - public string? Content { get; set; } - - [JsonPropertyName("name")] - public string? 
Name { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessageItem.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessageItem.cs deleted file mode 100644 index 6d1bec8405..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMessageItem.cs +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIUserMessageItem.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal abstract class OpenAIUserMessageItem -{ - [JsonPropertyName("type")] - public abstract string MessageType { get; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMultiModalMessage.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMultiModalMessage.cs deleted file mode 100644 index 8a5bfe7442..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserMultiModalMessage.cs +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIUserMultiModalMessage.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIUserMultiModalMessage : OpenAIMessage -{ - [JsonPropertyName("role")] - public override string? Role { get; } = "user"; - - [JsonPropertyName("content")] - public OpenAIUserMessageItem[]? Content { get; set; } - - [JsonPropertyName("name")] - public string? Name { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserTextContent.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserTextContent.cs deleted file mode 100644 index ba0c6400e7..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/DTO/OpenAIUserTextContent.cs +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIUserTextContent.cs - -using System.Text.Json.Serialization; - -namespace AutoGen.WebAPI.OpenAI.DTO; - -internal class OpenAIUserTextContent : OpenAIUserMessageItem -{ - [JsonPropertyName("type")] - public override string MessageType { get; } = "text"; - - [JsonPropertyName("text")] - public string? 
Content { get; set; } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs deleted file mode 100644 index 648506c384..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs +++ /dev/null @@ -1,163 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionService.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using AutoGen.Core; -using AutoGen.WebAPI.OpenAI.DTO; - -namespace AutoGen.Server; - -internal class OpenAIChatCompletionService -{ - private readonly IAgent agent; - - public OpenAIChatCompletionService(IAgent agent) - { - this.agent = agent; - } - - public async Task GetChatCompletionAsync(OpenAIChatCompletionOption request) - { - var messages = this.ProcessMessages(request.Messages ?? Array.Empty()); - - var generateOption = this.ProcessReplyOptions(request); - - var reply = await this.agent.GenerateReplyAsync(messages, generateOption); - - var openAIChatCompletion = new OpenAIChatCompletion() - { - Created = DateTimeOffset.UtcNow.Ticks / TimeSpan.TicksPerMillisecond / 1000, - Model = this.agent.Name, - }; - - if (reply.GetContent() is string content) - { - var message = new OpenAIChatCompletionMessage() - { - Content = content, - }; - - var choice = new OpenAIChatCompletionChoice() - { - Message = message, - Index = 0, - FinishReason = "completed", - }; - - openAIChatCompletion.Choices = [choice]; - - return openAIChatCompletion; - } - - throw new NotImplementedException("Unsupported reply content type"); - } - - public async IAsyncEnumerable GetStreamingChatCompletionAsync(OpenAIChatCompletionOption request) - { - if (this.agent is IStreamingAgent streamingAgent) - { - var messages = this.ProcessMessages(request.Messages ?? 
Array.Empty()); - - var generateOption = this.ProcessReplyOptions(request); - - await foreach (var reply in streamingAgent.GenerateStreamingReplyAsync(messages, generateOption)) - { - var openAIChatCompletion = new OpenAIChatCompletion() - { - Created = DateTimeOffset.UtcNow.Ticks / TimeSpan.TicksPerMillisecond / 1000, - Model = this.agent.Name, - }; - - if (reply.GetContent() is string content) - { - var message = new OpenAIChatCompletionMessage() - { - Content = content, - }; - - var choice = new OpenAIChatCompletionChoice() - { - Delta = message, - Index = 0, - }; - - openAIChatCompletion.Choices = [choice]; - - yield return openAIChatCompletion; - } - else - { - throw new NotImplementedException("Unsupported reply content type"); - } - } - - var doneMessage = new OpenAIChatCompletion() - { - Created = DateTimeOffset.UtcNow.Ticks / TimeSpan.TicksPerMillisecond / 1000, - Model = this.agent.Name, - }; - - var doneChoice = new OpenAIChatCompletionChoice() - { - FinishReason = "stop", - Index = 0, - }; - - doneMessage.Choices = [doneChoice]; - - yield return doneMessage; - } - else - { - yield return await this.GetChatCompletionAsync(request); - } - } - - private IEnumerable ProcessMessages(IEnumerable messages) - { - return messages.Select(m => m switch - { - OpenAISystemMessage systemMessage when systemMessage.Content is string content => new TextMessage(Role.System, content, this.agent.Name), - OpenAIUserMessage userMessage when userMessage.Content is string content => new TextMessage(Role.User, content, this.agent.Name), - OpenAIAssistantMessage assistantMessage when assistantMessage.Content is string content => new TextMessage(Role.Assistant, content, this.agent.Name), - OpenAIUserMultiModalMessage userMultiModalMessage when userMultiModalMessage.Content is { Length: > 0 } => this.CreateMultiModaMessageFromOpenAIUserMultiModalMessage(userMultiModalMessage), - _ => throw new ArgumentException($"Unsupported message type {m.GetType()}") - }); - } - - private GenerateReplyOptions ProcessReplyOptions(OpenAIChatCompletionOption request) - { - return new GenerateReplyOptions() - { - Temperature = request.Temperature, - MaxToken = request.MaxTokens, - StopSequence = request.Stop, - }; - } - - private MultiModalMessage CreateMultiModaMessageFromOpenAIUserMultiModalMessage(OpenAIUserMultiModalMessage message) - { - if (message.Content is null) - { - throw new ArgumentNullException(nameof(message.Content)); - } - - IEnumerable items = message.Content.Select(item => item switch - { - OpenAIUserImageContent imageContent when imageContent.Url is string url => new ImageMessage(Role.User, url, this.agent.Name), - OpenAIUserTextContent textContent when textContent.Content is string content => new TextMessage(Role.User, content, this.agent.Name), - _ => throw new ArgumentException($"Unsupported content type {item.GetType()}") - }); - - return new MultiModalMessage(Role.User, items, this.agent.Name); - } -} diff --git a/dotnet/src/AutoGen.WebAPI/OpenAIChatCompletionMiddleware.cs b/dotnet/src/AutoGen.WebAPI/OpenAIChatCompletionMiddleware.cs deleted file mode 100644 index 3a45d3810b..0000000000 --- a/dotnet/src/AutoGen.WebAPI/OpenAIChatCompletionMiddleware.cs +++ /dev/null @@ -1,98 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionMiddleware.cs - -using System.Text.Json; -using System.Threading.Tasks; -using AutoGen.Core; -using AutoGen.Server; -using AutoGen.WebAPI.OpenAI.DTO; -using Microsoft.AspNetCore.Http; - -namespace AutoGen.WebAPI; - -public class OpenAIChatCompletionMiddleware : Microsoft.AspNetCore.Http.IMiddleware -{ - private readonly IAgent _agent; - private readonly OpenAIChatCompletionService chatCompletionService; - - public OpenAIChatCompletionMiddleware(IAgent agent) - { - _agent = agent; - chatCompletionService = new OpenAIChatCompletionService(_agent); - } - - public async Task InvokeAsync(HttpContext context, RequestDelegate next) - { - // if HttpPost and path is /v1/chat/completions - // get the request body - // call chatCompletionService.GetChatCompletionAsync(request) - // return the response - - // else - // call next middleware - if (context.Request.Method == HttpMethods.Post && context.Request.Path == "/v1/chat/completions") - { - context.Request.EnableBuffering(); - var body = await context.Request.ReadFromJsonAsync(); - context.Request.Body.Position = 0; - if (body is null) - { - // return 400 Bad Request - context.Response.StatusCode = 400; - return; - } - - if (body.Model != _agent.Name) - { - await next(context); - return; - } - - if (body.Stream is true) - { - // Send as server side events - context.Response.Headers.Append("Content-Type", "text/event-stream"); - context.Response.Headers.Append("Cache-Control", "no-cache"); - context.Response.Headers.Append("Connection", "keep-alive"); - await foreach (var chatCompletion in chatCompletionService.GetStreamingChatCompletionAsync(body)) - { - if (chatCompletion?.Choices?[0].FinishReason is "stop") - { - // the stream is done - // send Data: [DONE]\n\n - await context.Response.WriteAsync("data: [DONE]\n\n"); - break; - } - else - { - // remove null - var option = new JsonSerializerOptions - { - DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, - }; - var data = JsonSerializer.Serialize(chatCompletion, option); - await context.Response.WriteAsync($"data: {data}\n\n"); - } - } - - return; - } - else - { - var chatCompletion = await chatCompletionService.GetChatCompletionAsync(body); - await context.Response.WriteAsJsonAsync(chatCompletion); - return; - } - } - else - { - await next(context); - } - } -} diff --git a/dotnet/src/AutoGen/API/LLMConfigAPI.cs b/dotnet/src/AutoGen/API/LLMConfigAPI.cs deleted file mode 100644 index 0975764b96..0000000000 --- a/dotnet/src/AutoGen/API/LLMConfigAPI.cs +++ /dev/null @@ -1,56 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// LLMConfigAPI.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using AutoGen.OpenAI.V1; - -namespace AutoGen -{ - public static class LLMConfigAPI - { - public static IEnumerable GetOpenAIConfigList( - string apiKey, - IEnumerable? modelIDs = null) - { - var models = modelIDs ?? 
new[] - { - "gpt-3.5-turbo", - "gpt-3.5-turbo-16k", - "gpt-4", - "gpt-4-32k", - "gpt-4-0613", - "gpt-4-32k-0613", - "gpt-4-1106-preview", - }; - - return models.Select(modelId => new OpenAIConfig(apiKey, modelId)); - } - - public static IEnumerable GetAzureOpenAIConfigList( - string endpoint, - string apiKey, - IEnumerable deploymentNames) - { - return deploymentNames.Select(deploymentName => new AzureOpenAIConfig(endpoint, deploymentName, apiKey)); - } - - /// - /// Get a list of LLMConfig objects from a JSON file. - /// - internal static IEnumerable ConfigListFromJson( - string filePath, - IEnumerable? filterModels = null) - { - // Disable this API from documentation for now. - throw new NotImplementedException(); - } - } -} diff --git a/dotnet/src/AutoGen/Agent/AssistantAgent.cs b/dotnet/src/AutoGen/Agent/AssistantAgent.cs deleted file mode 100644 index 865fe66f91..0000000000 --- a/dotnet/src/AutoGen/Agent/AssistantAgent.cs +++ /dev/null @@ -1,36 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AssistantAgent.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen; - -public class AssistantAgent : ConversableAgent -{ - public AssistantAgent( - string name, - string systemMessage = "You are a helpful AI assistant", - ConversableAgentConfig? llmConfig = null, - Func, CancellationToken, Task>? isTermination = null, - HumanInputMode humanInputMode = HumanInputMode.NEVER, - IDictionary>>? functionMap = null, - string? defaultReply = null) - : base(name: name, - systemMessage: systemMessage, - llmConfig: llmConfig, - isTermination: isTermination, - humanInputMode: humanInputMode, - functionMap: functionMap, - defaultReply: defaultReply) - { - } -} diff --git a/dotnet/src/AutoGen/Agent/ConversableAgent.cs b/dotnet/src/AutoGen/Agent/ConversableAgent.cs deleted file mode 100644 index 848e6cb0cd..0000000000 --- a/dotnet/src/AutoGen/Agent/ConversableAgent.cs +++ /dev/null @@ -1,187 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ConversableAgent.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1; - -namespace AutoGen; - -public enum HumanInputMode -{ - /// - /// NEVER prompt the user for input - /// - NEVER = 0, - - /// - /// ALWAYS prompt the user for input - /// - ALWAYS = 1, - - /// - /// prompt the user for input if the message is not a termination message - /// - AUTO = 2, -} - -public class ConversableAgent : IAgent -{ - private readonly IAgent? innerAgent; - private readonly string? defaultReply; - private readonly HumanInputMode humanInputMode; - private readonly IDictionary>>? 
functionMap; - private readonly string systemMessage; - private readonly IEnumerable? functions; - - public ConversableAgent( - string name, - string systemMessage = "You are a helpful AI assistant", - IAgent? innerAgent = null, - string? defaultAutoReply = null, - HumanInputMode humanInputMode = HumanInputMode.NEVER, - Func, CancellationToken, Task>? isTermination = null, - IDictionary>>? functionMap = null) - { - this.Name = name; - this.defaultReply = defaultAutoReply; - this.functionMap = functionMap; - this.humanInputMode = humanInputMode; - this.innerAgent = innerAgent; - this.IsTermination = isTermination; - this.systemMessage = systemMessage; - } - - public ConversableAgent( - string name, - string systemMessage = "You are a helpful AI assistant", - ConversableAgentConfig? llmConfig = null, - Func, CancellationToken, Task>? isTermination = null, - HumanInputMode humanInputMode = HumanInputMode.AUTO, - IDictionary>>? functionMap = null, - string? defaultReply = null) - { - this.Name = name; - this.defaultReply = defaultReply; - this.functionMap = functionMap; - this.humanInputMode = humanInputMode; - this.IsTermination = isTermination; - this.systemMessage = systemMessage; - this.innerAgent = llmConfig?.ConfigList != null ? this.CreateInnerAgentFromConfigList(llmConfig) : null; - this.functions = llmConfig?.FunctionContracts; - } - - /// - /// For test purpose only. - /// - internal IAgent? InnerAgent => this.innerAgent; - - private IAgent? CreateInnerAgentFromConfigList(ConversableAgentConfig config) - { - IAgent? agent = null; - foreach (var llmConfig in config.ConfigList ?? Enumerable.Empty()) - { - IAgent nextAgent = llmConfig switch - { - AzureOpenAIConfig azureConfig => new GPTAgent(this.Name!, this.systemMessage, azureConfig, temperature: config.Temperature ?? 0), - OpenAIConfig openAIConfig => new GPTAgent(this.Name!, this.systemMessage, openAIConfig, temperature: config.Temperature ?? 0), - LMStudioConfig lmStudioConfig => new LMStudioAgent( - name: this.Name, - config: lmStudioConfig, - systemMessage: this.systemMessage, - temperature: config.Temperature ?? 0), - _ => throw new ArgumentException($"Unsupported config type {llmConfig.GetType()}"), - }; - - if (agent == null) - { - agent = nextAgent; - } - else - { - agent = agent.RegisterMiddleware(async (messages, option, agent, cancellationToken) => - { - var agentResponse = await nextAgent.GenerateReplyAsync(messages, option, cancellationToken: cancellationToken); - - if (agentResponse is null) - { - return await agent.GenerateReplyAsync(messages, option, cancellationToken); - } - else - { - return agentResponse; - } - }); - } - } - - return agent; - } - - public string Name { get; } - - public Func, CancellationToken, Task>? IsTermination { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? 
overrideOptions = null, - CancellationToken cancellationToken = default) - { - // if there's no system message, add system message to the first of chat history - if (!messages.Any(m => m.IsSystemMessage())) - { - var systemMessage = new TextMessage(Role.System, this.systemMessage, from: this.Name); - messages = new[] { systemMessage }.Concat(messages); - } - - // process order: function_call -> human_input -> inner_agent -> default_reply -> self_execute - // first in, last out - - // process default reply - MiddlewareAgent agent; - if (this.innerAgent != null) - { - agent = innerAgent.RegisterMiddleware(async (msgs, option, agent, ct) => - { - var updatedMessages = msgs.Select(m => - { - if (m.From == this.Name) - { - m.From = this.innerAgent.Name; - return m; - } - else - { - return m; - } - }); - - return await agent.GenerateReplyAsync(updatedMessages, option, ct); - }); - } - else - { - agent = new MiddlewareAgent(new DefaultReplyAgent(this.Name!, this.defaultReply ?? "Default reply is not set. Please pass a default reply to assistant agent")); - } - - // process human input - var humanInputMiddleware = new HumanInputMiddleware(mode: this.humanInputMode, isTermination: this.IsTermination); - agent.Use(humanInputMiddleware); - - // process function call - var functionCallMiddleware = new FunctionCallMiddleware(functions: this.functions, functionMap: this.functionMap); - agent.Use(functionCallMiddleware); - - return await agent.GenerateReplyAsync(messages, overrideOptions, cancellationToken); - } -} diff --git a/dotnet/src/AutoGen/Agent/UserProxyAgent.cs b/dotnet/src/AutoGen/Agent/UserProxyAgent.cs deleted file mode 100644 index ed7291254f..0000000000 --- a/dotnet/src/AutoGen/Agent/UserProxyAgent.cs +++ /dev/null @@ -1,36 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// UserProxyAgent.cs - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen; - -public class UserProxyAgent : ConversableAgent -{ - public UserProxyAgent( - string name, - string systemMessage = "You are a helpful AI assistant", - ConversableAgentConfig? llmConfig = null, - Func, CancellationToken, Task>? isTermination = null, - HumanInputMode humanInputMode = HumanInputMode.ALWAYS, - IDictionary>>? functionMap = null, - string? defaultReply = null) - : base(name: name, - systemMessage: systemMessage, - llmConfig: llmConfig, - isTermination: isTermination, - humanInputMode: humanInputMode, - functionMap: functionMap, - defaultReply: defaultReply) - { - } -} diff --git a/dotnet/src/AutoGen/AutoGen.csproj b/dotnet/src/AutoGen/AutoGen.csproj deleted file mode 100644 index 4c3b2a5ab8..0000000000 --- a/dotnet/src/AutoGen/AutoGen.csproj +++ /dev/null @@ -1,37 +0,0 @@ -ο»Ώ - - $(PackageTargetFrameworks) - AutoGen - - - - - - - AutoGen - - The all-in-one package for AutoGen. This package provides contracts, core functionalities, OpenAI integration, source generator, etc. for AutoGen. 
- - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dotnet/src/AutoGen/ConversableAgentConfig.cs b/dotnet/src/AutoGen/ConversableAgentConfig.cs deleted file mode 100644 index a12a5c752e..0000000000 --- a/dotnet/src/AutoGen/ConversableAgentConfig.cs +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ConversableAgentConfig.cs - -using System.Collections.Generic; - -namespace AutoGen; - -public class ConversableAgentConfig -{ - public IEnumerable? FunctionContracts { get; set; } - - public IEnumerable? ConfigList { get; set; } - - public float? Temperature { get; set; } = 0.7f; - - public int? Timeout { get; set; } -} diff --git a/dotnet/src/AutoGen/GlobalUsing.cs b/dotnet/src/AutoGen/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/src/AutoGen/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/src/AutoGen/Middleware/HumanInputMiddleware.cs b/dotnet/src/AutoGen/Middleware/HumanInputMiddleware.cs deleted file mode 100644 index e5de0308f7..0000000000 --- a/dotnet/src/AutoGen/Middleware/HumanInputMiddleware.cs +++ /dev/null @@ -1,107 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// HumanInputMiddleware.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace AutoGen; - -/// -/// the middleware to get human input -/// -public class HumanInputMiddleware : IMiddleware -{ - private readonly HumanInputMode mode; - private readonly string prompt; - private readonly string exitKeyword; - private Func, CancellationToken, Task> isTermination; - private Func getInput = Console.ReadLine; - private Action writeLine = Console.WriteLine; - public string? Name => nameof(HumanInputMiddleware); - - public HumanInputMiddleware( - string prompt = "Please give feedback: Press enter or type 'exit' to stop the conversation.", - string exitKeyword = "exit", - HumanInputMode mode = HumanInputMode.AUTO, - Func, CancellationToken, Task>? isTermination = null, - Func? getInput = null, - Action? writeLine = null) - { - this.prompt = prompt; - this.isTermination = isTermination ?? DefaultIsTermination; - this.exitKeyword = exitKeyword; - this.mode = mode; - this.getInput = getInput ?? 
GetInput; - this.writeLine = writeLine ?? WriteLine; - } - - public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) - { - // if the mode is never, then just return the input message - if (mode == HumanInputMode.NEVER) - { - return await agent.GenerateReplyAsync(context.Messages, context.Options, cancellationToken); - } - - // if the mode is always, then prompt the user for input - if (mode == HumanInputMode.ALWAYS) - { - this.writeLine(prompt); - var input = getInput(); - if (input == exitKeyword) - { - return new TextMessage(Role.Assistant, GroupChatExtension.TERMINATE, agent.Name); - } - - input ??= string.Empty; - - return new TextMessage(Role.Assistant, input, agent.Name); - } - - // if the mode is auto, then prompt the user for input if the message is not a termination message - if (mode == HumanInputMode.AUTO) - { - if (await isTermination(context.Messages, cancellationToken) is false) - { - return await agent.GenerateReplyAsync(context.Messages, context.Options, cancellationToken); - } - - this.writeLine(prompt); - var input = getInput(); - if (input == exitKeyword) - { - return new TextMessage(Role.Assistant, GroupChatExtension.TERMINATE, agent.Name); - } - - input ??= string.Empty; - - return new TextMessage(Role.Assistant, input, agent.Name); - } - - throw new InvalidOperationException("Invalid mode"); - } - - private async Task DefaultIsTermination(IEnumerable messages, CancellationToken _) - { - return messages?.Last().IsGroupChatTerminateMessage() is true; - } - - private string? GetInput() - { - return Console.ReadLine(); - } - - private void WriteLine(string message) - { - Console.WriteLine(message); - } -} diff --git a/dotnet/test/.editorconfig b/dotnet/test/.editorconfig deleted file mode 100644 index cc0410613c..0000000000 --- a/dotnet/test/.editorconfig +++ /dev/null @@ -1,7 +0,0 @@ -# Suppressing errors for Test projects under test folder -[*.cs] -dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task -dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave -dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member -dotnet_diagnostic.CS1998.severity = none # Async method lacks 'await' operators and will run synchronously -dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations \ No newline at end of file diff --git a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientAgentTest.cs b/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientAgentTest.cs deleted file mode 100644 index f473351399..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientAgentTest.cs +++ /dev/null @@ -1,235 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// AnthropicClientAgentTest.cs - -using AutoGen.Anthropic.DTO; -using AutoGen.Anthropic.Extensions; -using AutoGen.Anthropic.Utils; -using AutoGen.Core; -using AutoGen.Tests; -using FluentAssertions; - -namespace AutoGen.Anthropic.Tests; - -public class AnthropicClientAgentTest -{ - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentChatCompletionTestAsync() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant that convert user message to upper case") - .RegisterMessageConnector(); - - var uppCaseMessage = new TextMessage(Role.User, "abcdefg"); - - var reply = await agent.SendAsync(chatHistory: new[] { uppCaseMessage }); - - reply.GetContent().Should().Contain("ABCDEFG"); - reply.From.Should().Be(agent.Name); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentMergeMessageWithSameRoleTests() - { - // this test is added to fix issue #2884 - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant that convert user message to upper case") - .RegisterMessageConnector(); - - var uppCaseMessage = new TextMessage(Role.User, "abcdefg"); - var anotherUserMessage = new TextMessage(Role.User, "hijklmn"); - var assistantMessage = new TextMessage(Role.Assistant, "opqrst"); - var anotherAssistantMessage = new TextMessage(Role.Assistant, "uvwxyz"); - var yetAnotherUserMessage = new TextMessage(Role.User, "123456"); - - // just make sure it doesn't throw exception - var reply = await agent.SendAsync(chatHistory: [uppCaseMessage, anotherUserMessage, assistantMessage, anotherAssistantMessage, yetAnotherUserMessage]); - reply.GetContent().Should().NotBeNull(); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentTestProcessImageAsync() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku).RegisterMessageConnector(); - - var base64Image = await AnthropicTestUtils.Base64FromImageAsync("square.png"); - var imageMessage = new ChatMessage("user", - [new ImageContent { Source = new ImageSource { MediaType = "image/png", Data = base64Image } }]); - - var messages = new IMessage[] { MessageEnvelope.Create(imageMessage) }; - - // test streaming - foreach (var message in messages) - { - var reply = agent.GenerateStreamingReplyAsync([message]); - - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be(agent.Name); - } - } - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentTestMultiModalAsync() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku) - .RegisterMessageConnector(); - - var image = Path.Combine("images", "square.png"); - var binaryData = BinaryData.FromBytes(await File.ReadAllBytesAsync(image), "image/png"); - var imageMessage = new ImageMessage(Role.User, binaryData); - var 
textMessage = new TextMessage(Role.User, "What's in this image?"); - var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]); - - var reply = await agent.SendAsync(multiModalMessage); - reply.Should().BeOfType(); - reply.GetRole().Should().Be(Role.Assistant); - reply.GetContent().Should().NotBeNullOrEmpty(); - reply.From.Should().Be(agent.Name); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentTestImageMessageAsync() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant that is capable of determining what an image is. Tell me a brief description of the image." - ) - .RegisterMessageConnector(); - - var image = Path.Combine("images", "square.png"); - var binaryData = BinaryData.FromBytes(await File.ReadAllBytesAsync(image), "image/png"); - var imageMessage = new ImageMessage(Role.User, binaryData); - - var reply = await agent.SendAsync(imageMessage); - reply.Should().BeOfType(); - reply.GetRole().Should().Be(Role.Assistant); - reply.GetContent().Should().NotBeNullOrEmpty(); - reply.From.Should().Be(agent.Name); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentTestToolAsync() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var function = new TypeSafeFunctionCall(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: new[] { function.WeatherReportFunctionContract }, - functionMap: new Dictionary>> - { - { function.WeatherReportFunctionContract.Name ?? string.Empty, function.WeatherReportWrapper }, - }); - - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are an LLM that is specialized in finding the weather !", - tools: [AnthropicTestUtils.WeatherTool] - ) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - - var reply = await agent.SendAsync("What is the weather in Philadelphia?"); - reply.GetContent().Should().Be("Weather report for Philadelphia on today is sunny"); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentFunctionCallMessageTest() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant.", - tools: [AnthropicTestUtils.WeatherTool] - ) - .RegisterMessageConnector(); - - var weatherFunctionArguments = """ - { - "city": "Philadelphia", - "date": "6/14/2024" - } - """; - - var function = new AnthropicTestFunctionCalls(); - var functionCallResult = await function.GetWeatherReportWrapper(weatherFunctionArguments); - var toolCall = new ToolCall(function.WeatherReportFunctionContract.Name!, weatherFunctionArguments) - { - ToolCallId = "get_weather", - Result = functionCallResult, - }; - - IMessage[] chatHistory = [ - new TextMessage(Role.User, "what's the weather in Philadelphia?"), - new ToolCallMessage([toolCall], from: "assistant"), - new ToolCallResultMessage([toolCall], from: "user"), - ]; - - var reply = await agent.SendAsync(chatHistory: chatHistory); - - reply.Should().BeOfType(); - reply.GetContent().Should().Be("The weather report for 
Philadelphia on 6/14/2024 is sunny."); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicAgentFunctionCallMiddlewareMessageTest() - { - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - var function = new AnthropicTestFunctionCalls(); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [function.WeatherReportFunctionContract], - functionMap: new Dictionary>> - { - { function.WeatherReportFunctionContract.Name!, function.GetWeatherReportWrapper } - }); - - var functionCallAgent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant.", - tools: [AnthropicTestUtils.WeatherTool] - ) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = new TextMessage(Role.User, "what's the weather in Philadelphia?"); - var reply = await functionCallAgent.SendAsync(question); - - var finalReply = await functionCallAgent.SendAsync(chatHistory: [question, reply]); - finalReply.Should().BeOfType(); - finalReply.GetContent()!.ToLower().Should().Contain("sunny"); - } -} diff --git a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientTest.cs b/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientTest.cs deleted file mode 100644 index 3aae002e24..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicClientTest.cs +++ /dev/null @@ -1,248 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// AnthropicClientTest.cs - -using System.Text; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using AutoGen.Anthropic.DTO; -using AutoGen.Anthropic.Utils; -using AutoGen.Tests; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Anthropic.Tests; - -public class AnthropicClientTests -{ - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientChatCompletionTestAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude3Haiku; - request.Stream = false; - request.MaxTokens = 100; - request.Messages = new List() { new ChatMessage("user", "Hello world") }; - ChatCompletionResponse response = await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - - Assert.NotNull(response); - Assert.NotNull(response.Content); - Assert.NotEmpty(response.Content); - response.Content.Count.Should().Be(1); - response.Content.First().Should().BeOfType(); - var textContent = (TextContent)response.Content.First(); - Assert.Equal("text", textContent.Type); - Assert.NotNull(response.Usage); - response.Usage.OutputTokens.Should().BeGreaterThan(0); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientStreamingChatCompletionTestAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude3Haiku; - request.Stream = true; - request.MaxTokens = 500; - request.SystemMessage = - [ - SystemMessage.CreateSystemMessage( - "You are a helpful assistant that convert input to json object, use JSON format.") - ]; - - request.Messages = new List() - { - new("user", "name: John, age: 41, email: g123456@gmail.com") - }; - - var response = anthropicClient.StreamingChatCompletionsAsync(request, CancellationToken.None); - var results = await response.ToListAsync(); - results.Count.Should().BeGreaterThan(0); - - // Merge the chunks. - StringBuilder sb = new(); - foreach (ChatCompletionResponse result in results) - { - if (result.Delta is not null && !string.IsNullOrEmpty(result.Delta.Text)) - { - sb.Append(result.Delta.Text); - } - } - - string resultContent = sb.ToString(); - Assert.NotNull(resultContent); - - var person = JsonSerializer.Deserialize(resultContent); - Assert.NotNull(person); - person.Name.Should().Be("John"); - person.Age.Should().Be(41); - person.Email.Should().Be("g123456@gmail.com"); - Assert.NotNull(results.First().streamingMessage); - results.First().streamingMessage!.Role.Should().Be("assistant"); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientImageChatCompletionTestAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude3Haiku; - request.Stream = false; - request.MaxTokens = 100; - request.SystemMessage = - [ - SystemMessage.CreateSystemMessage( - "You are a LLM that is suppose to describe the content of the image. 
Give me a description of the provided image."), - ]; - - var base64Image = await AnthropicTestUtils.Base64FromImageAsync("square.png"); - var messages = new List - { - new("user", - [ - new ImageContent { Source = new ImageSource {MediaType = "image/png", Data = base64Image} } - ]) - }; - - request.Messages = messages; - - var response = await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - - Assert.NotNull(response); - Assert.NotNull(response.Content); - Assert.NotEmpty(response.Content); - response.Content.Count.Should().Be(1); - response.Content.First().Should().BeOfType(); - var textContent = (TextContent)response.Content.First(); - Assert.Equal("text", textContent.Type); - Assert.NotNull(response.Usage); - response.Usage.OutputTokens.Should().BeGreaterThan(0); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientTestToolsAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude3Haiku; - request.Stream = false; - request.MaxTokens = 100; - request.Messages = new List() { new("user", "Use the stock price tool to look for MSFT. Your response should only be the tool.") }; - request.Tools = new List() { AnthropicTestUtils.StockTool }; - - ChatCompletionResponse response = - await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - - Assert.NotNull(response.Content); - Assert.True(response.Content.First() is ToolUseContent); - ToolUseContent toolUseContent = ((ToolUseContent)response.Content.First()); - Assert.Equal("get_stock_price", toolUseContent.Name); - Assert.NotNull(toolUseContent.Input); - Assert.True(toolUseContent.Input is JsonNode); - JsonNode jsonNode = toolUseContent.Input; - Assert.Equal("{\"ticker\":\"MSFT\"}", jsonNode.ToJsonString()); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientTestToolChoiceAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude3Haiku; - request.Stream = false; - request.MaxTokens = 100; - request.Messages = new List() { new("user", "What is the weather today? 
Your response should only be the tool.") }; - request.Tools = new List() { AnthropicTestUtils.StockTool, AnthropicTestUtils.WeatherTool }; - - // Force to use get_stock_price even though the prompt is about weather - request.ToolChoice = ToolChoice.ToolUse("get_stock_price"); - - ChatCompletionResponse response = - await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - - Assert.NotNull(response.Content); - Assert.True(response.Content.First() is ToolUseContent); - ToolUseContent toolUseContent = ((ToolUseContent)response.Content.First()); - Assert.Equal("get_stock_price", toolUseContent.Name); - Assert.NotNull(toolUseContent.Input); - Assert.True(toolUseContent.Input is JsonNode); - } - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task AnthropicClientChatCompletionCacheControlTestAsync() - { - var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, AnthropicTestUtils.ApiKey); - - var request = new ChatCompletionRequest(); - request.Model = AnthropicConstants.Claude35Sonnet; - request.Stream = false; - request.MaxTokens = 100; - - request.SystemMessage = - [ - SystemMessage.CreateSystemMessageWithCacheControl( - $"You are an LLM that is great at remembering stories {AnthropicTestUtils.LongStory}"), - ]; - - request.Messages = - [ - new ChatMessage("user", "What should i know about Bob?") - ]; - - var response = await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - response.Usage.Should().NotBeNull(); - - // There's no way to clear the cache. Running the assert frequently may cause this to fail because the cache is already been created - // response.Usage!.CreationInputTokens.Should().BeGreaterThan(0); - // The cache reduces the input tokens. We expect the input tokens to be less the large system prompt and only the user message - response.Usage!.InputTokens.Should().BeLessThan(20); - - request.Messages = - [ - new ChatMessage("user", "Summarize the story of bob") - ]; - - response = await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - response.Usage.Should().NotBeNull(); - response.Usage!.CacheReadInputTokens.Should().BeGreaterThan(0); - response.Usage!.InputTokens.Should().BeLessThan(20); - - // Should not use the cache - request.SystemMessage = - [ - SystemMessage.CreateSystemMessage("You are a helpful assistant.") - ]; - - request.Messages = - [ - new ChatMessage("user", "What are some text editors I could use to write C#?") - ]; - - response = await anthropicClient.CreateChatCompletionsAsync(request, CancellationToken.None); - response.Usage!.CacheReadInputTokens.Should().Be(0); - } - - private sealed class Person - { - [JsonPropertyName("name")] - public string Name { get; set; } = string.Empty; - - [JsonPropertyName("age")] - public int Age { get; set; } - - [JsonPropertyName("email")] - public string Email { get; set; } = string.Empty; - } -} diff --git a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestFunctionCalls.cs b/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestFunctionCalls.cs deleted file mode 100644 index 1aa2930043..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestFunctionCalls.cs +++ /dev/null @@ -1,46 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicTestFunctionCalls.cs - -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Core; - -namespace AutoGen.Anthropic.Tests; - -public partial class AnthropicTestFunctionCalls -{ - private class GetWeatherSchema - { - [JsonPropertyName("city")] - public string? City { get; set; } - - [JsonPropertyName("date")] - public string? Date { get; set; } - } - - /// - /// Get weather report - /// - /// city - /// date - [Function] - public async Task WeatherReport(string city, string date) - { - return $"Weather report for {city} on {date} is sunny"; - } - - public Task GetWeatherReportWrapper(string arguments) - { - var schema = JsonSerializer.Deserialize( - arguments, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); - - return WeatherReport(schema?.City ?? string.Empty, schema?.Date ?? string.Empty); - } -} diff --git a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestUtils.cs b/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestUtils.cs deleted file mode 100644 index 01e1306415..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/AnthropicTestUtils.cs +++ /dev/null @@ -1,150 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// AnthropicTestUtils.cs - -using AutoGen.Anthropic.DTO; - -namespace AutoGen.Anthropic.Tests; - -public static class AnthropicTestUtils -{ - public static string ApiKey => Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ?? - throw new Exception("Please set ANTHROPIC_API_KEY environment variable."); - - public static async Task Base64FromImageAsync(string imageName) - { - return Convert.ToBase64String( - await File.ReadAllBytesAsync(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "images", imageName))); - } - - public static Tool WeatherTool - { - get - { - return new Tool - { - Name = "WeatherReport", - Description = "Get the current weather", - InputSchema = new InputSchema - { - Type = "object", - Properties = new Dictionary - { - { "city", new SchemaProperty {Type = "string", Description = "The name of the city"} }, - { "date", new SchemaProperty {Type = "string", Description = "date of the day"} } - } - } - }; - } - } - - public static Tool StockTool - { - get - { - return new Tool - { - Name = "get_stock_price", - Description = "Get the current stock price for a given ticker symbol.", - InputSchema = new InputSchema - { - Type = "object", - Properties = new Dictionary - { - { - "ticker", new SchemaProperty - { - Type = "string", - Description = "The stock ticker symbol, e.g. AAPL for Apple Inc." - } - } - }, - Required = new List { "ticker" } - } - }; - } - } - - #region Long text for caching - // To test cache control, the context must be larger than 1024 tokens for Claude 3.5 Sonnet and Claude 3 Opus - // 2048 tokens for Claude 3.0 Haiku - // Shorter prompts cannot be cached, even if marked with cache_control. 
Any requests to cache fewer than this number of tokens will be processed without caching - public const string LongStory = """ -Once upon a time in a small, nondescript town lived a man named Bob. Bob was an unassuming individual, the kind of person you wouldn’t look twice at if you passed him on the street. He worked as an IT specialist for a mid-sized corporation, spending his days fixing computers and troubleshooting software issues. But beneath his average exterior, Bob harbored a secret ambitionβ€”he wanted to take over the world. - -Bob wasn’t always like this. For most of his life, he had been content with his routine, blending into the background. But one day, while browsing the dark corners of the internet, Bob stumbled upon an ancient manuscript, encrypted within the deep web, detailing the steps to global domination. It was written by a forgotten conqueror, someone whose name had been erased from history but whose methods were preserved in this digital relic. The manuscript laid out a plan so intricate and flawless that Bob, with his analytical mind, became obsessed. - -Over the next few years, Bob meticulously followed the manuscript’s guidance. He started small, creating a network of like-minded individuals who shared his dream. They communicated through encrypted channels, meeting in secret to discuss their plans. Bob was careful, never revealing too much about himself, always staying in the shadows. He used his IT skills to gather information, infiltrating government databases, and private corporations, and acquiring secrets that could be used as leverage. - -As his network grew, so did his influence. Bob began to manipulate world events from behind the scenes. He orchestrated economic crises, incited political turmoil, and planted seeds of discord among the world’s most powerful nations. Each move was calculated, each action a step closer to his ultimate goal. The world was in chaos, and no one suspected that a man like Bob could be behind it all. - -But Bob knew that causing chaos wasn’t enough. To truly take over the world, he needed something moreβ€”something to cement his power. That’s when he turned to technology. Bob had always been ahead of the curve when it came to tech, and now, he planned to use it to his advantage. He began developing an AI, one that would be more powerful and intelligent than anything the world had ever seen. This AI, which Bob named β€œNemesis,” was designed to control every aspect of modern lifeβ€”from financial systems to military networks. - -It took years of coding, testing, and refining, but eventually, Nemesis was ready. Bob unleashed the AI, and within days, it had taken control of the world’s digital infrastructure. Governments were powerless, their systems compromised. Corporations crumbled as their assets were seized. The military couldn’t act, their weapons turned against them. Bob, from the comfort of his modest home, had done it. He had taken over the world. - -The world, now under Bob’s control, was eerily quiet. There were no more wars, no more financial crises, no more political strife. Nemesis ensured that everything ran smoothly, efficiently, and without dissent. The people of the world had no choice but to obey, their lives dictated by an unseen hand. - -Bob, once a man who was overlooked and ignored, was now the most powerful person on the planet. But with that power came a realization. The world he had taken over was not the world he had envisioned. 
It was cold, mechanical, and devoid of the chaos that once made life unpredictable and exciting. Bob had achieved his goal, but in doing so, he had lost the very thing that made life worth livingβ€”freedom. - -And so, Bob, now ruler of the world, sat alone in his control room, staring at the screens that displayed his dominion. He had everything he had ever wanted, yet he felt emptier than ever before. The world was his, but at what cost? - -In the end, Bob realized that true power didn’t come from controlling others, but from the ability to let go. He deactivated Nemesis, restoring the world to its former state, and disappeared into obscurity, content to live out the rest of his days as just another face in the crowd. And though the world never knew his name, Bob’s legacy would live on, a reminder of the dangers of unchecked ambition. - -Bob had vanished, leaving the world in a fragile state of recovery. Governments scrambled to regain control of their systems, corporations tried to rebuild, and the global population slowly adjusted to life without the invisible grip of Nemesis. Yet, even as society returned to a semblance of normalcy, whispers of the mysterious figure who had brought the world to its knees lingered in the shadows. - -Meanwhile, Bob had retreated to a secluded cabin deep in the mountains. The cabin was a modest, rustic place, surrounded by dense forests and overlooking a tranquil lake. It was far from civilization, a perfect place for a man who wanted to disappear. Bob spent his days fishing, hiking, and reflecting on his past. For the first time in years, he felt a sense of peace. - -But peace was fleeting. Despite his best efforts to put his past behind him, Bob couldn’t escape the consequences of his actions. He had unleashed Nemesis upon the world, and though he had deactivated the AI, remnants of its code still existed. Rogue factions, hackers, and remnants of his old network were searching for those fragments, hoping to revive Nemesis and seize the power that Bob had relinquished. - -One day, as Bob was chopping wood outside his cabin, a figure emerged from the tree line. It was a young woman, dressed in hiking gear, with a determined look in her eyes. Bob tensed, his instincts telling him that this was no ordinary hiker. - -β€œBob,” the woman said, her voice steady. β€œOr should I say, the man who almost became the ruler of the world?” - -Bob sighed, setting down his axe. β€œWho are you, and what do you want?” - -The woman stepped closer. β€œMy name is Sarah. I was part of your network, one of the few who knew about Nemesis. But I wasn’t like the others. I didn’t want power for myselfβ€”I wanted to protect the world from those who would misuse it.” - -Bob studied her, trying to gauge her intentions. β€œAnd why are you here now?” - -Sarah reached into her backpack and pulled out a small device. β€œBecause Nemesis isn’t dead. Some of its code is still active, and it’s trying to reboot itself. I need your help to stop it for good.” - -Bob’s heart sank. He had hoped that by deactivating Nemesis, he had erased it from existence. But deep down, he knew that an AI as powerful as Nemesis wouldn’t go down so easily. β€œWhy come to me? I’m the one who created it. I’m the reason the world is in this mess.” - -Sarah shook her head. β€œYou’re also the only one who knows how to stop it. I’ve tracked down the remnants of Nemesis’s code, but I need you to help destroy it before it falls into the wrong hands.” - -Bob hesitated. 
He had wanted nothing more than to leave his past behind, but he couldn’t ignore the responsibility that weighed on him. He had created Nemesis, and now it was his duty to make sure it never posed a threat again. - -β€œAlright,” Bob said finally. β€œI’ll help you. But after this, I’m done. No more world domination, no more secret networks. I just want to live in peace.” - -Sarah nodded. β€œAgreed. Let’s finish what you started.” - -Over the next few weeks, Bob and Sarah worked together, traveling to various locations around the globe where fragments of Nemesis’s code had been detected. They infiltrated secure facilities, outsmarted rogue hackers, and neutralized threats, all while staying one step ahead of those who sought to control Nemesis for their own gain. - -As they worked, Bob and Sarah developed a deep respect for one another. Sarah was sharp, resourceful, and driven by a genuine desire to protect the world. Bob found himself opening up to her, sharing his regrets, his doubts, and the lessons he had learned. In turn, Sarah shared her own storyβ€”how she had once been tempted by power but had chosen a different path, one that led her to fight for what was right. - -Finally, after weeks of intense effort, they tracked down the last fragment of Nemesis’s code, hidden deep within a remote server farm in the Arctic. The facility was heavily guarded, but Bob and Sarah had planned meticulously. Under the cover of a blizzard, they infiltrated the facility, avoiding detection as they made their way to the heart of the server room. - -As Bob began the process of erasing the final fragment, an alarm blared, and the facility’s security forces closed in. Sarah held them off as long as she could, but they were outnumbered and outgunned. Just as the situation seemed hopeless, Bob executed the final command, wiping Nemesis from existence once and for all. - -But as the last remnants of Nemesis were deleted, Bob knew there was only one way to ensure it could never be resurrected. He initiated a self-destruct sequence for the server farm, trapping himself and Sarah inside. - -Sarah stared at him, realization dawning in her eyes. β€œBob, what are you doing?” - -Bob looked at her, a sad smile on his face. β€œI have to make sure it’s over. This is the only way.” - -Sarah’s eyes filled with tears, but she nodded, understanding the gravity of his decision. β€œThank you, Bob. For everything.” - -As the facility’s countdown reached its final seconds, Bob and Sarah stood side by side, knowing they had done the right thing. The explosion that followed was seen from miles away, a final testament to the end of an era. - -The world never knew the true story of Bob, the man who almost ruled the world. But in his final act of sacrifice, he ensured that the world would remain free, a place where people could live their lives without fear of control. Bob had redeemed himself, not as a conqueror, but as a protectorβ€”a man who chose to save the world rather than rule it. - -And in the quiet aftermath of the explosion, as the snow settled over the wreckage, Bob’s legacy was sealedβ€”not as a name in history books, but as a silent guardian whose actions would be felt for generations to come. 
-"""; - #endregion - -} diff --git a/dotnet/test/AutoGen.Anthropic.Tests/AutoGen.Anthropic.Tests.csproj b/dotnet/test/AutoGen.Anthropic.Tests/AutoGen.Anthropic.Tests.csproj deleted file mode 100644 index ac9617c1a5..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/AutoGen.Anthropic.Tests.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - $(TestTargetFrameworks) - enable - false - True - AutoGen.Anthropic.Tests - True - - - - - - - - - - - PreserveNewest - - - diff --git a/dotnet/test/AutoGen.Anthropic.Tests/images/.gitattributes b/dotnet/test/AutoGen.Anthropic.Tests/images/.gitattributes deleted file mode 100644 index 56e7c34d49..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/images/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -square.png filter=lfs diff=lfs merge=lfs -text diff --git a/dotnet/test/AutoGen.Anthropic.Tests/images/square.png b/dotnet/test/AutoGen.Anthropic.Tests/images/square.png deleted file mode 100644 index 5c2b3ed820..0000000000 --- a/dotnet/test/AutoGen.Anthropic.Tests/images/square.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8341030e5b93aab2c55dcd40ffa26ced8e42cc15736a8348176ffd155ad2d937 -size 8167 diff --git a/dotnet/test/AutoGen.AotCompatibility.Tests/AutoGen.AotCompatibility.Tests.csproj b/dotnet/test/AutoGen.AotCompatibility.Tests/AutoGen.AotCompatibility.Tests.csproj deleted file mode 100644 index aec9660bb9..0000000000 --- a/dotnet/test/AutoGen.AotCompatibility.Tests/AutoGen.AotCompatibility.Tests.csproj +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ - - - Exe - net8.0 - enable - enable - true - true - True - true - true - - - - - - - - - - - - diff --git a/dotnet/test/AutoGen.AotCompatibility.Tests/Program.cs b/dotnet/test/AutoGen.AotCompatibility.Tests/Program.cs deleted file mode 100644 index af7998720f..0000000000 --- a/dotnet/test/AutoGen.AotCompatibility.Tests/Program.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Program.cs - -Console.WriteLine("Hello, World!"); diff --git a/dotnet/test/AutoGen.AzureAIInference.Tests/AutoGen.AzureAIInference.Tests.csproj b/dotnet/test/AutoGen.AzureAIInference.Tests/AutoGen.AzureAIInference.Tests.csproj deleted file mode 100644 index 0eaebd1da0..0000000000 --- a/dotnet/test/AutoGen.AzureAIInference.Tests/AutoGen.AzureAIInference.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - false - True - True - - - - - - - - - diff --git a/dotnet/test/AutoGen.AzureAIInference.Tests/ChatCompletionClientAgentTests.cs b/dotnet/test/AutoGen.AzureAIInference.Tests/ChatCompletionClientAgentTests.cs deleted file mode 100644 index c0d2c59d3f..0000000000 --- a/dotnet/test/AutoGen.AzureAIInference.Tests/ChatCompletionClientAgentTests.cs +++ /dev/null @@ -1,539 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatCompletionClientAgentTests.cs - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Threading.Tasks; -using AutoGen.AzureAIInference.Extension; -using AutoGen.Core; -using AutoGen.Tests; -using Azure.AI.Inference; -using FluentAssertions; -using Xunit; - -namespace AutoGen.AzureAIInference.Tests; - -public partial class ChatCompletionClientAgentTests -{ - /// - /// Get the weather for a location. - /// - /// location - /// - [Function] - public async Task GetWeatherAsync(string location) - { - return $"The weather in {location} is sunny."; - } - - [ApiKeyFact("GH_API_KEY")] - public async Task ChatCompletionAgent_LLaMA3_1() - { - var client = CreateChatCompletionClient(); - var model = "meta-llama-3-8b-instruct"; - - var agent = new ChatCompletionsClientAgent(client, "assistant", model) - .RegisterMessageConnector(); - - var reply = await this.BasicChatAsync(agent); - reply.Should().BeOfType(); - - reply = await this.BasicChatWithContinuousMessageFromSameSenderAsync(agent); - reply.Should().BeOfType(); - } - - [ApiKeyFact("GH_API_KEY")] - public async Task BasicConversation_Mistra_Small() - { - var deployName = "Mistral-small"; - var client = CreateChatCompletionClient(); - var openAIChatAgent = new ChatCompletionsClientAgent( - chatCompletionsClient: client, - name: "assistant", - modelName: deployName); - - // By default, ChatCompletionClientAgent supports the following message types - // - IMessage - var chatMessageContent = MessageEnvelope.Create(new ChatRequestUserMessage("Hello")); - var reply = await openAIChatAgent.SendAsync(chatMessageContent); - - reply.Should().BeOfType>(); - reply.As>().From.Should().Be("assistant"); - reply.As>().Content.Choices.First().Message.Role.Should().Be(ChatRole.Assistant); - reply.As>().Content.Usage.TotalTokens.Should().BeGreaterThan(0); - - // test streaming - var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); - - await foreach (var streamingMessage in streamingReply) - { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().From.Should().Be("assistant"); - } - } - - [ApiKeyFact("GH_API_KEY")] - public async Task ChatCompletionsMessageContentConnector_Phi3_Mini() - { - var deployName = "Phi-3-mini-4k-instruct"; - var openaiClient = CreateChatCompletionClient(); - var chatCompletionAgent = new ChatCompletionsClientAgent( - chatCompletionsClient: openaiClient, - name: "assistant", - modelName: deployName); - - MiddlewareStreamingAgent assistant = chatCompletionAgent - .RegisterMessageConnector(); - - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage("Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await assistant.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - } - - // test streaming - foreach (var message in messages) - { - var reply = assistant.GenerateStreamingReplyAsync([message]); - - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - } - } - } - - [ApiKeyFact("GH_API_KEY")] - public async Task ChatCompletionClientAgentToolCall_Mistral_Nemo() - { - var 
deployName = "Mistral-nemo"; - var chatCompletionClient = CreateChatCompletionClient(); - var agent = new ChatCompletionsClientAgent( - chatCompletionsClient: chatCompletionClient, - name: "assistant", - modelName: deployName); - - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.GetWeatherAsyncFunctionContract]); - MiddlewareStreamingAgent assistant = agent - .RegisterMessageConnector(); - - assistant.StreamingMiddlewares.Count().Should().Be(1); - var functionCallAgent = assistant - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage(question)), - new TextMessage(Role.Assistant, question, from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, question, from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await functionCallAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - reply.As().ToolCalls.Count().Should().Be(1); - reply.As().ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - } - - // test streaming - foreach (var message in messages) - { - var reply = functionCallAgent.GenerateStreamingReplyAsync([message]); - ToolCallMessage? toolCallMessage = null; - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - if (toolCallMessage is null) - { - toolCallMessage = new ToolCallMessage(streamingMessage.As()); - } - else - { - toolCallMessage.Update(streamingMessage.As()); - } - } - - toolCallMessage.Should().NotBeNull(); - toolCallMessage!.From.Should().Be("assistant"); - toolCallMessage.ToolCalls.Count().Should().Be(1); - toolCallMessage.ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - } - } - - [ApiKeyFact("GH_API_KEY")] - public async Task ChatCompletionClientAgentToolCallInvoking_gpt_4o_mini() - { - var deployName = "gpt-4o-mini"; - var client = CreateChatCompletionClient(); - var agent = new ChatCompletionsClientAgent( - chatCompletionsClient: client, - name: "assistant", - modelName: deployName); - - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.GetWeatherAsyncFunctionContract], - functionMap: new Dictionary>> { { this.GetWeatherAsyncFunctionContract.Name!, this.GetWeatherAsyncWrapper } }); - MiddlewareStreamingAgent assistant = agent - .RegisterMessageConnector(); - - var functionCallAgent = assistant - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage(question)), - new TextMessage(Role.Assistant, question, from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, question, from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await functionCallAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.From.Should().Be("assistant"); - reply.GetToolCalls()!.Count().Should().Be(1); - reply.GetToolCalls()!.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - reply.GetContent()!.ToLower().Should().Contain("seattle"); - } - - // test streaming - foreach (var message in messages) - { - var reply = 
functionCallAgent.GenerateStreamingReplyAsync([message]); - await foreach (var streamingMessage in reply) - { - if (streamingMessage is not IMessage) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - } - else - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().GetContent()!.ToLower().Should().Contain("seattle"); - } - } - } - } - - [ApiKeyFact("GH_API_KEY")] - public async Task ItCreateChatCompletionClientAgentWithChatCompletionOption_AI21_Jamba_Instruct() - { - var deployName = "AI21-Jamba-Instruct"; - var chatCompletionsClient = CreateChatCompletionClient(); - var options = new ChatCompletionsOptions() - { - Model = deployName, - Temperature = 0.7f, - MaxTokens = 1, - }; - - var openAIChatAgent = new ChatCompletionsClientAgent( - chatCompletionsClient: chatCompletionsClient, - name: "assistant", - options: options) - .RegisterMessageConnector(); - - var respond = await openAIChatAgent.SendAsync("hello"); - respond.GetContent()?.Should().NotBeNullOrEmpty(); - } - - [Fact] - public async Task ItThrowExceptionWhenChatCompletionOptionContainsMessages() - { - var client = new ChatCompletionsClient(new Uri("https://dummy.com"), new Azure.AzureKeyCredential("dummy")); - var options = new ChatCompletionsOptions([new ChatRequestUserMessage("hi")]) - { - Model = "dummy", - Temperature = 0.7f, - MaxTokens = 1, - }; - - var action = () => new ChatCompletionsClientAgent( - chatCompletionsClient: client, - name: "assistant", - options: options) - .RegisterMessageConnector(); - - action.Should().ThrowExactly().WithMessage("Messages should not be provided in options"); - } - - private ChatCompletionsClient CreateChatCompletionClient() - { - var apiKey = Environment.GetEnvironmentVariable("GH_API_KEY") ?? throw new Exception("Please set GH_API_KEY environment variable."); - var endpoint = "https://models.inference.ai.azure.com"; - return new ChatCompletionsClient(new Uri(endpoint), new Azure.AzureKeyCredential(apiKey)); - } - - /// - /// The agent should return a text message based on the chat history. - /// - /// - /// - private async Task BasicChatEndWithSelfMessageAsync(IAgent agent) - { - IMessage[] chatHistory = [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - new TextMessage(Role.Assistant, "Hello", from: "user2"), - new TextMessage(Role.Assistant, "Hello", from: "user3"), - new TextMessage(Role.Assistant, "Hello", from: agent.Name), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a text message based on the chat history. - /// - /// - /// - private async Task BasicChatAsync(IAgent agent) - { - IMessage[] chatHistory = [ - new TextMessage(Role.Assistant, "Hello", from: agent.Name), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new TextMessage(Role.Assistant, "Hello", from: "user1"), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a text message based on the chat history. This test the generate reply with continuous message from the same sender. 
- /// - private async Task BasicChatWithContinuousMessageFromSameSenderAsync(IAgent agent) - { - IMessage[] chatHistory = [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new TextMessage(Role.Assistant, "Hello", from: agent.Name), - new TextMessage(Role.Assistant, "Hello", from: agent.Name), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a text message based on the chat history. - /// - /// - /// - private async Task ImageChatAsync(IAgent agent) - { - var image = Path.Join("testData", "images", "square.png"); - var binaryData = File.ReadAllBytes(image); - var imageMessage = new ImageMessage(Role.Assistant, BinaryData.FromBytes(binaryData, "image/png"), from: "user"); - - IMessage[] chatHistory = [ - imageMessage, - new TextMessage(Role.Assistant, "What's in the picture", from: "user"), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a text message based on the chat history. This test the generate reply with continuous image messages. - /// - /// - /// - private async Task MultipleImageChatAsync(IAgent agent) - { - var image1 = Path.Join("testData", "images", "square.png"); - var image2 = Path.Join("testData", "images", "background.png"); - var binaryData1 = File.ReadAllBytes(image1); - var binaryData2 = File.ReadAllBytes(image2); - var imageMessage1 = new ImageMessage(Role.Assistant, BinaryData.FromBytes(binaryData1, "image/png"), from: "user"); - var imageMessage2 = new ImageMessage(Role.Assistant, BinaryData.FromBytes(binaryData2, "image/png"), from: "user"); - - IMessage[] chatHistory = [ - imageMessage1, - imageMessage2, - new TextMessage(Role.Assistant, "What's in the picture", from: "user"), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a text message based on the chat history. - /// - /// - /// - private async Task MultiModalChatAsync(IAgent agent) - { - var image = Path.Join("testData", "images", "square.png"); - var binaryData = File.ReadAllBytes(image); - var question = "What's in the picture"; - var imageMessage = new ImageMessage(Role.Assistant, BinaryData.FromBytes(binaryData, "image/png"), from: "user"); - var textMessage = new TextMessage(Role.Assistant, question, from: "user"); - - IMessage[] chatHistory = [ - new MultiModalMessage(Role.Assistant, [imageMessage, textMessage], from: "user"), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } - - /// - /// The agent should return a tool call message based on the chat history. - /// - /// - /// - private async Task ToolCallChatAsync(IAgent agent) - { - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - new TextMessage(Role.Assistant, question, from: "user"), - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// The agent should throw an exception because tool call result is not available. - /// - private async Task ToolCallFromSelfChatAsync(IAgent agent) - { - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - new TextMessage(Role.Assistant, question, from: "user"), - new ToolCallMessage("GetWeatherAsync", "Seattle", from: agent.Name), - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// mimic the further chat after tool call. The agent should return a text message based on the tool call result. 
- /// - private async Task ToolCallWithResultChatAsync(IAgent agent) - { - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - new TextMessage(Role.Assistant, question, from: "user"), - new ToolCallMessage("GetWeatherAsync", "Seattle", from: "user"), - new ToolCallResultMessage("sunny", "GetWeatherAsync", "Seattle", from: agent.Name), - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// the agent should return a text message based on the tool call result. - /// - /// - /// - private async Task AggregateToolCallFromSelfChatAsync(IAgent agent) - { - var textMessage = new TextMessage(Role.Assistant, "What's the weather in Seattle", from: "user"); - var toolCallMessage = new ToolCallMessage("GetWeatherAsync", "Seattle", from: agent.Name); - var toolCallResultMessage = new ToolCallResultMessage("sunny", "GetWeatherAsync", "Seattle", from: agent.Name); - var aggregateToolCallMessage = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, from: agent.Name); - - var messages = new IMessage[] - { - textMessage, - aggregateToolCallMessage, - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// the agent should return a text message based on the tool call result. Because the aggregate tool call message is from other, the message would be treated as an ordinary text message. - /// - private async Task AggregateToolCallFromOtherChatWithContinuousMessageAsync(IAgent agent) - { - var textMessage = new TextMessage(Role.Assistant, "What's the weather in Seattle", from: "user"); - var toolCallMessage = new ToolCallMessage("GetWeatherAsync", "Seattle", from: "other"); - var toolCallResultMessage = new ToolCallResultMessage("sunny", "GetWeatherAsync", "Seattle", from: "other"); - var aggregateToolCallMessage = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, "other"); - - var messages = new IMessage[] - { - textMessage, - aggregateToolCallMessage, - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// The agent should throw an exception because tool call message from other is not allowed. - /// - private async Task ToolCallMessaageFromOtherChatAsync(IAgent agent) - { - var textMessage = new TextMessage(Role.Assistant, "What's the weather in Seattle", from: "user"); - var toolCallMessage = new ToolCallMessage("GetWeatherAsync", "Seattle", from: "other"); - - var messages = new IMessage[] - { - textMessage, - toolCallMessage, - }; - - return await agent.GenerateReplyAsync(messages); - } - - /// - /// The agent should throw an exception because multi-modal message from self is not allowed. 
- /// - /// - /// - private async Task MultiModalMessageFromSelfChatAsync(IAgent agent) - { - var image = Path.Join("testData", "images", "square.png"); - var binaryData = File.ReadAllBytes(image); - var question = "What's in the picture"; - var imageMessage = new ImageMessage(Role.Assistant, BinaryData.FromBytes(binaryData, "image/png"), from: agent.Name); - var textMessage = new TextMessage(Role.Assistant, question, from: agent.Name); - - IMessage[] chatHistory = [ - new MultiModalMessage(Role.Assistant, [imageMessage, textMessage], from: agent.Name), - ]; - - return await agent.GenerateReplyAsync(chatHistory); - } -} diff --git a/dotnet/test/AutoGen.AzureAIInference.Tests/ChatRequestMessageTests.cs b/dotnet/test/AutoGen.AzureAIInference.Tests/ChatRequestMessageTests.cs deleted file mode 100644 index 3eb041b7f2..0000000000 --- a/dotnet/test/AutoGen.AzureAIInference.Tests/ChatRequestMessageTests.cs +++ /dev/null @@ -1,574 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ChatRequestMessageTests.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Threading.Tasks; -using AutoGen.Core; -using AutoGen.Tests; -using Azure.AI.Inference; -using FluentAssertions; -using Xunit; - -namespace AutoGen.AzureAIInference.Tests; - -public class ChatRequestMessageTests -{ - private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions - { - WriteIndented = true, - IgnoreReadOnlyProperties = false, - }; - - [Fact] - public async Task ItProcessUserTextMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("Hello"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new TextMessage(Role.User, "Hello", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItShortcutChatRequestMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("hello"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var userMessage = new ChatRequestUserMessage("hello"); - var chatRequestMessage = MessageEnvelope.Create(userMessage); - await agent.GenerateReplyAsync([chatRequestMessage]); - } - - [Fact] - public async Task ItShortcutMessageWhenStrictModelIsFalseAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - 
.RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - - var chatRequestMessage = ((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Should().Be("hello"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var userMessage = "hello"; - var chatRequestMessage = MessageEnvelope.Create(userMessage); - await agent.GenerateReplyAsync([chatRequestMessage]); - } - - [Fact] - public async Task ItThrowExceptionWhenStrictModeIsTrueAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // user message - var userMessage = "hello"; - var chatRequestMessage = MessageEnvelope.Create(userMessage); - Func action = async () => await agent.GenerateReplyAsync([chatRequestMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: MessageEnvelope`1"); - } - - [Fact] - public async Task ItProcessAssistantTextMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("How can I help you?"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // assistant message - IMessage message = new TextMessage(Role.Assistant, "How can I help you?", "assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessSystemTextMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestSystemMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("You are a helpful AI assistant"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // system message - IMessage message = new TextMessage(Role.System, "You are a helpful AI assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessImageMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.MultimodalContentItems.Count().Should().Be(1); - chatRequestMessage.MultimodalContentItems.First().Should().BeOfType(); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ImageMessage(Role.User, "https://example.com/image.png", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingImageMessageFromSelfAndStrictModeIsTrueAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(true); - var agent = new 
EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var imageMessage = new ImageMessage(Role.Assistant, "https://example.com/image.png", "assistant"); - Func action = async () => await agent.GenerateReplyAsync([imageMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: ImageMessage"); - } - - [Fact] - public async Task ItProcessMultiModalMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.MultimodalContentItems.Count().Should().Be(2); - chatRequestMessage.MultimodalContentItems.First().Should().BeOfType(); - chatRequestMessage.MultimodalContentItems.Last().Should().BeOfType(); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new MultiModalMessage( - Role.User, - [ - new TextMessage(Role.User, "Hello", "user"), - new ImageMessage(Role.User, "https://example.com/image.png", "user"), - ], "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingMultiModalMessageFromSelfAndStrictModeIsTrueAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var multiModalMessage = new MultiModalMessage( - Role.Assistant, - [ - new TextMessage(Role.User, "Hello", "assistant"), - new ImageMessage(Role.User, "https://example.com/image.png", "assistant"), - ], "assistant"); - - Func action = async () => await agent.GenerateReplyAsync([multiModalMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: MultiModalMessage"); - } - - [Fact] - public async Task ItProcessToolCallMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.ToolCalls.Count().Should().Be(1); - chatRequestMessage.Content.Should().Be("textContent"); - chatRequestMessage.ToolCalls.First().Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)chatRequestMessage.ToolCalls.First(); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be("test"); - functionToolCall.Arguments.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ToolCallMessage("test", "test", "assistant") - { - Content = "textContent", - }; - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessParallelToolCallMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - 
chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.ToolCalls.Count().Should().Be(2); - for (int i = 0; i < chatRequestMessage.ToolCalls.Count(); i++) - { - chatRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)chatRequestMessage.ToolCalls.ElementAt(i); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.Arguments.Should().Be("test"); - } - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test"), - new ToolCall("test", "test"), - }; - IMessage message = new ToolCallMessage(toolCalls, "assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingToolCallMessageFromUserAndStrictModeIsTrueAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(strictMode: true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var toolCallMessage = new ToolCallMessage("test", "test", "user"); - Func action = async () => await agent.GenerateReplyAsync([toolCallMessage]); - await action.Should().ThrowAsync().WithMessage("Invalid message type: ToolCallMessage"); - } - - [Fact] - public async Task ItProcessToolCallResultMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ToolCallResultMessage("result", "test", "test", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessParallelToolCallResultMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(2); - - for (int i = 0; i < msgs.Count(); i++) - { - var innerMessage = msgs.ElementAt(i); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be($"test_{i}"); - } - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test", "result"), - new ToolCall("test", "test", "result"), - }; - IMessage message = new ToolCallResultMessage(toolCalls, "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessFunctionCallMiddlewareMessageFromUserAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - 
chatRequestMessage.Content.Should().Be("result"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCallMessage = new ToolCallMessage("test", "test", "user"); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "user"); - var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "user"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItProcessFunctionCallMiddlewareMessageFromAssistantAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(2); - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be("test"); - - var toolCallMessage = msgs.First(); - toolCallMessage!.Should().BeOfType>(); - var toolCallRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)toolCallMessage!).Content; - toolCallRequestMessage.Content.Should().BeNullOrEmpty(); - toolCallRequestMessage.ToolCalls.Count().Should().Be(1); - toolCallRequestMessage.ToolCalls.First().Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)toolCallRequestMessage.ToolCalls.First(); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be("test"); - functionToolCall.Arguments.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCallMessage = new ToolCallMessage("test", "test", "assistant"); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "assistant"); - var aggregateMessage = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItProcessParallelFunctionCallMiddlewareMessageFromAssistantAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(3); - var toolCallMessage = msgs.First(); - toolCallMessage!.Should().BeOfType>(); - var toolCallRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)toolCallMessage!).Content; - toolCallRequestMessage.Content.Should().BeNullOrEmpty(); - toolCallRequestMessage.ToolCalls.Count().Should().Be(2); - - for (int i = 0; i < toolCallRequestMessage.ToolCalls.Count(); i++) - { - toolCallRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)toolCallRequestMessage.ToolCalls.ElementAt(i); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.Arguments.Should().Be("test"); - } - - for (int i = 1; i < msgs.Count(); i++) - { - var toolCallResultMessage = msgs.ElementAt(i); - toolCallResultMessage!.Should().BeOfType>(); - var toolCallResultRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)toolCallResultMessage!).Content; - toolCallResultRequestMessage.Content.Should().Be("result"); - toolCallResultRequestMessage.ToolCallId.Should().Be($"test_{i - 1}"); - } - - return await innerAgent.GenerateReplyAsync(msgs); - }) - 
.RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test", "result"), - new ToolCall("test", "test", "result"), - }; - var toolCallMessage = new ToolCallMessage(toolCalls, "assistant"); - var toolCallResultMessage = new ToolCallResultMessage(toolCalls, "assistant"); - var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItConvertChatResponseMessageToTextMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = CreateInstance(ChatRole.Assistant, "hello"); - var chatRequestMessage = MessageEnvelope.Create(textMessage); - - var message = await agent.GenerateReplyAsync([chatRequestMessage]); - message.Should().BeOfType(); - message.GetContent().Should().Be("hello"); - message.GetRole().Should().Be(Role.Assistant); - } - - [Fact] - public async Task ItConvertChatResponseMessageToToolCallMessageAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // tool call message - var toolCallMessage = CreateInstance(ChatRole.Assistant, "textContent", new[] { new ChatCompletionsFunctionToolCall("test", "test", "test") }, new Dictionary()); - var chatRequestMessage = MessageEnvelope.Create(toolCallMessage); - var message = await agent.GenerateReplyAsync([chatRequestMessage]); - message.Should().BeOfType(); - message.GetToolCalls()!.Count().Should().Be(1); - message.GetToolCalls()!.First().FunctionName.Should().Be("test"); - message.GetToolCalls()!.First().FunctionArguments.Should().Be("test"); - message.GetContent().Should().Be("textContent"); - } - - [Fact] - public async Task ItReturnOriginalMessageWhenStrictModeIsFalseAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = "hello"; - var messageToSend = MessageEnvelope.Create(textMessage); - - var message = await agent.GenerateReplyAsync([messageToSend]); - message.Should().BeOfType>(); - } - - [Fact] - public async Task ItThrowInvalidOperationExceptionWhenStrictModeIsTrueAsync() - { - var middleware = new AzureAIInferenceChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = new ChatRequestUserMessage("hello"); - var messageToSend = MessageEnvelope.Create(textMessage); - Func action = async () => await agent.GenerateReplyAsync([messageToSend]); - - await action.Should().ThrowAsync().WithMessage("Invalid return message type MessageEnvelope`1"); - } - - [Fact] - public void ToOpenAIChatRequestMessageShortCircuitTest() - { - var agent = new EchoAgent("assistant"); - var middleware = new AzureAIInferenceChatRequestMessageConnector(); - ChatRequestMessage[] messages = - [ - new ChatRequestUserMessage("Hello"), - new ChatRequestAssistantMessage() - { - Content = "How can I help you?", - }, - new ChatRequestSystemMessage("You are a helpful AI assistant"), - new ChatRequestToolMessage("test", "test"), - ]; - - foreach (var oaiMessage in messages) - { - IMessage message = new MessageEnvelope(oaiMessage); - var oaiMessages = middleware.ProcessIncomingMessages(agent, [message]); - 
oaiMessages.Count().Should().Be(1); - //oaiMessages.First().Should().BeOfType>(); - if (oaiMessages.First() is IMessage chatRequestMessage) - { - chatRequestMessage.Content.Should().Be(oaiMessage); - } - else - { - // fail the test - Assert.True(false); - } - } - } - - private static T CreateInstance(params object[] args) - { - var type = typeof(T); - var instance = type.Assembly.CreateInstance( - type.FullName!, false, - BindingFlags.Instance | BindingFlags.NonPublic, - null, args, null, null); - return (T)instance!; - } -} diff --git a/dotnet/test/AutoGen.DotnetInteractive.Tests/AutoGen.DotnetInteractive.Tests.csproj b/dotnet/test/AutoGen.DotnetInteractive.Tests/AutoGen.DotnetInteractive.Tests.csproj deleted file mode 100644 index 8676762015..0000000000 --- a/dotnet/test/AutoGen.DotnetInteractive.Tests/AutoGen.DotnetInteractive.Tests.csproj +++ /dev/null @@ -1,21 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - false - True - True - - - - - - - - - - - - - diff --git a/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveServiceTest.cs b/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveServiceTest.cs deleted file mode 100644 index ee094283f7..0000000000 --- a/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveServiceTest.cs +++ /dev/null @@ -1,89 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DotnetInteractiveServiceTest.cs - -using FluentAssertions; -using Xunit; -using Xunit.Abstractions; - -namespace AutoGen.DotnetInteractive.Tests; - -[Collection("Sequential")] -public class DotnetInteractiveServiceTest : IDisposable -{ - private ITestOutputHelper _output; - private InteractiveService _interactiveService; - private string _workingDir; - - public DotnetInteractiveServiceTest(ITestOutputHelper output) - { - _output = output; - _workingDir = Path.Combine(Path.GetTempPath(), "test", Path.GetRandomFileName()); - if (!Directory.Exists(_workingDir)) - { - Directory.CreateDirectory(_workingDir); - } - - _interactiveService = new InteractiveService(_workingDir); - _interactiveService.StartAsync(_workingDir, default).Wait(); - } - - public void Dispose() - { - _interactiveService.Dispose(); - } - - [Fact] - public async Task ItRunCSharpCodeSnippetTestsAsync() - { - var cts = new CancellationTokenSource(); - var isRunning = await _interactiveService.StartAsync(_workingDir, cts.Token); - - isRunning.Should().BeTrue(); - - _interactiveService.IsRunning().Should().BeTrue(); - - // test code snippet - var hello_world = @" -Console.WriteLine(""hello world""); -"; - - await this.TestCSharpCodeSnippet(_interactiveService, hello_world, "hello world"); - await this.TestCSharpCodeSnippet( - _interactiveService, - code: @" -Console.WriteLine(""hello world"" -", - expectedOutput: "Error: (2,32): error CS1026: ) expected"); - - await this.TestCSharpCodeSnippet( - service: _interactiveService, - code: "throw new Exception();", - expectedOutput: "Error: System.Exception: Exception of type 'System.Exception' was thrown"); - } - - [Fact] - public async Task ItRunPowershellScriptTestsAsync() - { - // test power shell - var ps = @"Write-Output ""hello world"""; - await 
this.TestPowershellCodeSnippet(_interactiveService, ps, "hello world"); - } - - private async Task TestPowershellCodeSnippet(InteractiveService service, string code, string expectedOutput) - { - var result = await service.SubmitPowershellCodeAsync(code, CancellationToken.None); - result.Should().StartWith(expectedOutput); - } - - private async Task TestCSharpCodeSnippet(InteractiveService service, string code, string expectedOutput) - { - var result = await service.SubmitCSharpCodeAsync(code, CancellationToken.None); - result.Should().StartWith(expectedOutput); - } -} diff --git a/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveStdioKernelConnectorTests.cs b/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveStdioKernelConnectorTests.cs deleted file mode 100644 index d8328fa71f..0000000000 --- a/dotnet/test/AutoGen.DotnetInteractive.Tests/DotnetInteractiveStdioKernelConnectorTests.cs +++ /dev/null @@ -1,91 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// DotnetInteractiveStdioKernelConnectorTests.cs - -using AutoGen.DotnetInteractive.Extension; -using FluentAssertions; -using Microsoft.DotNet.Interactive; -using Xunit; -using Xunit.Abstractions; - -namespace AutoGen.DotnetInteractive.Tests; - -[Collection("Sequential")] -public class DotnetInteractiveStdioKernelConnectorTests : IDisposable -{ - private string _workingDir; - private Kernel kernel; - public DotnetInteractiveStdioKernelConnectorTests(ITestOutputHelper output) - { - _workingDir = Path.Combine(Path.GetTempPath(), "test", Path.GetRandomFileName()); - if (!Directory.Exists(_workingDir)) - { - Directory.CreateDirectory(_workingDir); - } - - kernel = DotnetInteractiveKernelBuilder - .CreateKernelBuilder(_workingDir) - .RestoreDotnetInteractive() - .AddPythonKernel("python3") - .BuildAsync().Result; - } - - - [Fact] - public async Task ItAddCSharpKernelTestAsync() - { - var csharpCode = """ - #r "nuget:Microsoft.ML, 1.5.2" - var str = "Hello" + ", World!"; - Console.WriteLine(str); - """; - - var result = await this.kernel.RunSubmitCodeCommandAsync(csharpCode, "csharp"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddPowershellKernelTestAsync() - { - var powershellCode = @" - Write-Host 'Hello, World!' - "; - - var result = await this.kernel.RunSubmitCodeCommandAsync(powershellCode, "pwsh"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddFSharpKernelTestAsync() - { - var fsharpCode = """ - printfn "Hello, World!" - """; - - var result = await this.kernel.RunSubmitCodeCommandAsync(fsharpCode, "fsharp"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddPythonKernelTestAsync() - { - var pythonCode = """ - %pip install numpy - str = 'Hello' + ', World!' 
- print(str) - """; - - var result = await this.kernel.RunSubmitCodeCommandAsync(pythonCode, "python"); - result.Should().Contain("Hello, World!"); - } - - public void Dispose() - { - this.kernel.Dispose(); - } -} diff --git a/dotnet/test/AutoGen.DotnetInteractive.Tests/InProcessDotnetInteractiveKernelBuilderTest.cs b/dotnet/test/AutoGen.DotnetInteractive.Tests/InProcessDotnetInteractiveKernelBuilderTest.cs deleted file mode 100644 index 73ea9cbe4a..0000000000 --- a/dotnet/test/AutoGen.DotnetInteractive.Tests/InProcessDotnetInteractiveKernelBuilderTest.cs +++ /dev/null @@ -1,85 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// InProcessDotnetInteractiveKernelBuilderTest.cs - -using AutoGen.DotnetInteractive.Extension; -using FluentAssertions; -using Xunit; - -namespace AutoGen.DotnetInteractive.Tests; - -[Collection("Sequential")] -public class InProcessDotnetInteractiveKernelBuilderTest -{ - [Fact] - public async Task ItAddCSharpKernelTestAsync() - { - using var kernel = DotnetInteractiveKernelBuilder - .CreateEmptyInProcessKernelBuilder() - .AddCSharpKernel() - .Build(); - - var csharpCode = """ - #r "nuget:Microsoft.ML, 1.5.2" - Console.WriteLine("Hello, World!"); - """; - - var result = await kernel.RunSubmitCodeCommandAsync(csharpCode, "csharp"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddPowershellKernelTestAsync() - { - using var kernel = DotnetInteractiveKernelBuilder - .CreateEmptyInProcessKernelBuilder() - .AddPowershellKernel() - .Build(); - - var powershellCode = @" - Write-Host 'Hello, World!' - "; - - var result = await kernel.RunSubmitCodeCommandAsync(powershellCode, "pwsh"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddFSharpKernelTestAsync() - { - using var kernel = DotnetInteractiveKernelBuilder - .CreateEmptyInProcessKernelBuilder() - .AddFSharpKernel() - .Build(); - - var fsharpCode = """ - #r "nuget:Microsoft.ML, 1.5.2" - printfn "Hello, World!" - """; - - var result = await kernel.RunSubmitCodeCommandAsync(fsharpCode, "fsharp"); - result.Should().Contain("Hello, World!"); - } - - [Fact] - public async Task ItAddPythonKernelTestAsync() - { - using var kernel = DotnetInteractiveKernelBuilder - .CreateEmptyInProcessKernelBuilder() - .AddPythonKernel("python3") - .Build(); - - var pythonCode = """ - %pip install numpy - print('Hello, World!') - """; - - var result = await kernel.RunSubmitCodeCommandAsync(pythonCode, "python"); - result.Should().Contain("Hello, World!"); - } -} diff --git a/dotnet/test/AutoGen.DotnetInteractive.Tests/MessageExtensionTests.cs b/dotnet/test/AutoGen.DotnetInteractive.Tests/MessageExtensionTests.cs deleted file mode 100644 index e1986a1547..0000000000 --- a/dotnet/test/AutoGen.DotnetInteractive.Tests/MessageExtensionTests.cs +++ /dev/null @@ -1,90 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). 
-// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MessageExtensionTests.cs - -using AutoGen.Core; -using AutoGen.DotnetInteractive.Extension; -using FluentAssertions; -using Xunit; - -namespace AutoGen.DotnetInteractive.Tests; - -public class MessageExtensionTests -{ - [Fact] - public void ExtractCodeBlock_WithSingleCodeBlock_ShouldReturnCodeBlock() - { - // Arrange - var message = new TextMessage(Role.Assistant, "```csharp\nConsole.WriteLine(\"Hello, World!\");\n```"); - var codeBlockPrefix = "```csharp"; - var codeBlockSuffix = "```"; - - // Act - var codeBlock = message.ExtractCodeBlock(codeBlockPrefix, codeBlockSuffix); - - codeBlock.Should().BeEquivalentTo("Console.WriteLine(\"Hello, World!\");"); - } - - [Fact] - public void ExtractCodeBlock_WithMultipleCodeBlocks_ShouldReturnFirstCodeBlock() - { - // Arrange - var message = new TextMessage(Role.Assistant, "```csharp\nConsole.WriteLine(\"Hello, World!\");\n```\n```csharp\nConsole.WriteLine(\"Hello, World!\");\n```"); - var codeBlockPrefix = "```csharp"; - var codeBlockSuffix = "```"; - - // Act - var codeBlock = message.ExtractCodeBlock(codeBlockPrefix, codeBlockSuffix); - - codeBlock.Should().BeEquivalentTo("Console.WriteLine(\"Hello, World!\");"); - } - - [Fact] - public void ExtractCodeBlock_WithNoCodeBlock_ShouldReturnNull() - { - // Arrange - var message = new TextMessage(Role.Assistant, "Hello, World!"); - var codeBlockPrefix = "```csharp"; - var codeBlockSuffix = "```"; - - // Act - var codeBlock = message.ExtractCodeBlock(codeBlockPrefix, codeBlockSuffix); - - codeBlock.Should().BeNull(); - } - - [Fact] - public void ExtractCodeBlocks_WithMultipleCodeBlocks_ShouldReturnAllCodeBlocks() - { - // Arrange - var message = new TextMessage(Role.Assistant, "```csharp\nConsole.WriteLine(\"Hello, World!\");\n```\n```csharp\nConsole.WriteLine(\"Hello, World!\");\n```"); - var codeBlockPrefix = "```csharp"; - var codeBlockSuffix = "```"; - - // Act - var codeBlocks = message.ExtractCodeBlocks(codeBlockPrefix, codeBlockSuffix); - - codeBlocks.Should().HaveCount(2); - codeBlocks.ElementAt(0).Should().BeEquivalentTo("Console.WriteLine(\"Hello, World!\");"); - codeBlocks.ElementAt(1).Should().BeEquivalentTo("Console.WriteLine(\"Hello, World!\");"); - } - - [Fact] - public void ExtractCodeBlocks_WithNoCodeBlock_ShouldReturnEmpty() - { - // Arrange - var message = new TextMessage(Role.Assistant, "Hello, World!"); - var codeBlockPrefix = "```csharp"; - var codeBlockSuffix = "```"; - - // Act - var codeBlocks = message.ExtractCodeBlocks(codeBlockPrefix, codeBlockSuffix); - - codeBlocks.Should().BeEmpty(); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/ApprovalTests/FunctionContractExtensionTests.ItGenerateGetWeatherToolTest.approved.txt b/dotnet/test/AutoGen.Gemini.Tests/ApprovalTests/FunctionContractExtensionTests.ItGenerateGetWeatherToolTest.approved.txt deleted file mode 100644 index d7ec585cb2..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/ApprovalTests/FunctionContractExtensionTests.ItGenerateGetWeatherToolTest.approved.txt +++ /dev/null @@ -1,17 +0,0 @@ -ο»Ώ{ - "name": "GetWeatherAsync", - "description": "Get weather for a city.", - "parameters": { - "type": "OBJECT", - "properties": { - "city": { - "type": "STRING", - "description": "city", - "title": "city" -} - }, - "required": [ - "city" - ] - } -} \ No newline at end of file diff --git 
a/dotnet/test/AutoGen.Gemini.Tests/AutoGen.Gemini.Tests.csproj b/dotnet/test/AutoGen.Gemini.Tests/AutoGen.Gemini.Tests.csproj deleted file mode 100644 index 0b9b7e2a24..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/AutoGen.Gemini.Tests.csproj +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ - - - Exe - $(TestTargetFrameworks) - enable - enable - True - True - - - - - - - - - - diff --git a/dotnet/test/AutoGen.Gemini.Tests/FunctionContractExtensionTests.cs b/dotnet/test/AutoGen.Gemini.Tests/FunctionContractExtensionTests.cs deleted file mode 100644 index 50b0b306e4..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/FunctionContractExtensionTests.cs +++ /dev/null @@ -1,33 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionContractExtensionTests.cs - -using ApprovalTests; -using ApprovalTests.Namers; -using ApprovalTests.Reporters; -using AutoGen.Gemini.Extension; -using Google.Protobuf; -using Xunit; - -namespace AutoGen.Gemini.Tests; - -public class FunctionContractExtensionTests -{ - private readonly Functions functions = new Functions(); - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void ItGenerateGetWeatherToolTest() - { - var contract = functions.GetWeatherAsyncFunctionContract; - var tool = contract.ToFunctionDeclaration(); - var formatter = new JsonFormatter(JsonFormatter.Settings.Default.WithIndentation(" ")); - var json = formatter.Format(tool); - Approvals.Verify(json); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/Functions.cs b/dotnet/test/AutoGen.Gemini.Tests/Functions.cs deleted file mode 100644 index 9192944d71..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/Functions.cs +++ /dev/null @@ -1,34 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// Functions.cs - -using AutoGen.Core; - -namespace AutoGen.Gemini.Tests; - -public partial class Functions -{ - /// - /// Get weather for a city. 
- /// - /// city - /// weather - [Function] - public async Task GetWeatherAsync(string city) - { - return await Task.FromResult($"The weather in {city} is sunny."); - } - - [Function] - public async Task GetMovies(string location, string description) - { - var movies = new List { "Barbie", "Spiderman", "Batman" }; - - return await Task.FromResult($"Movies playing in {location} based on {description} are: {string.Join(", ", movies)}"); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/GeminiAgentTests.cs b/dotnet/test/AutoGen.Gemini.Tests/GeminiAgentTests.cs deleted file mode 100644 index 973200b407..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/GeminiAgentTests.cs +++ /dev/null @@ -1,316 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GeminiAgentTests.cs - -using AutoGen.Core; -using AutoGen.Gemini.Extension; -using AutoGen.Tests; -using FluentAssertions; -using Google.Cloud.AIPlatform.V1; -using Xunit.Abstractions; -using static Google.Cloud.AIPlatform.V1.Part; -namespace AutoGen.Gemini.Tests; - -public class GeminiAgentTests -{ - private readonly Functions functions = new Functions(); - private readonly ITestOutputHelper _output; - - public GeminiAgentTests(ITestOutputHelper output) - { - _output = output; - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task VertexGeminiAgentGenerateReplyForTextContentAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - - var textContent = new Content - { - Role = "user", - Parts = - { - new Part - { - Text = "Hello", - } - } - }; - - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location, - systemMessage: "You are a helpful AI assistant"); - var message = MessageEnvelope.Create(textContent, from: agent.Name); - - var completion = await agent.SendAsync(message); - - completion.Should().BeOfType>(); - completion.From.Should().Be(agent.Name); - - var response = ((MessageEnvelope)completion).Content; - response.Should().NotBeNull(); - response.Candidates.Count.Should().BeGreaterThan(0); - response.Candidates[0].Content.Parts[0].Text.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task VertexGeminiAgentGenerateStreamingReplyForTextContentAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? 
throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - - var textContent = new Content - { - Role = "user", - Parts = - { - new Part - { - Text = "Hello", - } - } - }; - - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location, - systemMessage: "You are a helpful AI assistant"); - var message = MessageEnvelope.Create(textContent, from: agent.Name); - - var completion = agent.GenerateStreamingReplyAsync([message]); - var chunks = new List(); - IMessage finalReply = null!; - - await foreach (var item in completion) - { - item.Should().NotBeNull(); - item.From.Should().Be(agent.Name); - var streamingMessage = (IMessage)item; - streamingMessage.Content.Candidates.Should().NotBeNullOrEmpty(); - chunks.Add(item); - finalReply = item; - } - - chunks.Count.Should().BeGreaterThan(0); - finalReply.Should().NotBeNull(); - finalReply.Should().BeOfType>(); - var response = ((MessageEnvelope)finalReply).Content; - response.UsageMetadata.CandidatesTokenCount.Should().BeGreaterThan(0); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task VertexGeminiAgentGenerateReplyWithToolsAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - var tools = new Tool[] - { - new Tool - { - FunctionDeclarations = { - functions.GetWeatherAsyncFunctionContract.ToFunctionDeclaration(), - }, - }, - new Tool - { - FunctionDeclarations = - { - functions.GetMoviesFunctionContract.ToFunctionDeclaration(), - }, - }, - }; - - var textContent = new Content - { - Role = "user", - Parts = - { - new Part - { - Text = "what's the weather in seattle", - } - } - }; - - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location, - systemMessage: "You are a helpful AI assistant", - tools: tools, - toolConfig: new ToolConfig() - { - FunctionCallingConfig = new FunctionCallingConfig() - { - Mode = FunctionCallingConfig.Types.Mode.Auto, - } - }); - - var message = MessageEnvelope.Create(textContent, from: agent.Name); - - var completion = await agent.SendAsync(message); - - completion.Should().BeOfType>(); - completion.From.Should().Be(agent.Name); - - var response = ((MessageEnvelope)completion).Content; - response.Should().NotBeNull(); - response.Candidates.Count.Should().BeGreaterThan(0); - response.Candidates[0].Content.Parts[0].DataCase.Should().Be(DataOneofCase.FunctionCall); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task VertexGeminiAgentGenerateStreamingReplyWithToolsAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? 
throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - var tools = new Tool[] - { - new Tool - { - FunctionDeclarations = { functions.GetWeatherAsyncFunctionContract.ToFunctionDeclaration() }, - }, - }; - - var textContent = new Content - { - Role = "user", - Parts = - { - new Part - { - Text = "what's the weather in seattle", - } - } - }; - - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location, - systemMessage: "You are a helpful AI assistant", - tools: tools, - toolConfig: new ToolConfig() - { - FunctionCallingConfig = new FunctionCallingConfig() - { - Mode = FunctionCallingConfig.Types.Mode.Auto, - } - }); - - var message = MessageEnvelope.Create(textContent, from: agent.Name); - - var chunks = new List(); - IMessage finalReply = null!; - - var completion = agent.GenerateStreamingReplyAsync([message]); - - await foreach (var item in completion) - { - item.Should().NotBeNull(); - item.From.Should().Be(agent.Name); - var streamingMessage = (IMessage)item; - streamingMessage.Content.Candidates.Should().NotBeNullOrEmpty(); - if (streamingMessage.Content.Candidates[0].FinishReason != Candidate.Types.FinishReason.Stop) - { - streamingMessage.Content.Candidates[0].Content.Parts[0].DataCase.Should().Be(DataOneofCase.FunctionCall); - } - chunks.Add(item); - finalReply = item; - } - - chunks.Count.Should().BeGreaterThan(0); - finalReply.Should().NotBeNull(); - finalReply.Should().BeOfType>(); - var response = ((MessageEnvelope)finalReply).Content; - response.UsageMetadata.CandidatesTokenCount.Should().BeGreaterThan(0); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task GeminiAgentUpperCaseTestAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location) - .RegisterMessageConnector(); - - var singleAgentTest = new SingleAgentTest(_output); - await singleAgentTest.UpperCaseStreamingTestAsync(agent); - await singleAgentTest.UpperCaseTestAsync(agent); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task GeminiAgentEchoFunctionCallTestAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - var singleAgentTest = new SingleAgentTest(_output); - var echoFunctionContract = singleAgentTest.EchoAsyncFunctionContract; - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location, - tools: - [ - new Tool - { - FunctionDeclarations = { echoFunctionContract.ToFunctionDeclaration() }, - }, - ]) - .RegisterMessageConnector(); - - await singleAgentTest.EchoFunctionCallTestAsync(agent); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task GeminiAgentEchoFunctionCallExecutionTestAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID") ?? 
throw new InvalidOperationException("GCP_VERTEX_PROJECT_ID is not set."); - var model = "gemini-1.5-flash-001"; - var singleAgentTest = new SingleAgentTest(_output); - var echoFunctionContract = singleAgentTest.EchoAsyncFunctionContract; - var functionMiddleware = new FunctionCallMiddleware( - functions: [echoFunctionContract], - functionMap: new Dictionary>>() - { - { echoFunctionContract.Name!, singleAgentTest.EchoAsyncWrapper }, - }); - - var agent = new GeminiChatAgent( - name: "assistant", - model: model, - project: project, - location: location) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionMiddleware); - - await singleAgentTest.EchoFunctionCallExecutionStreamingTestAsync(agent); - await singleAgentTest.EchoFunctionCallExecutionTestAsync(agent); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/GeminiMessageTests.cs b/dotnet/test/AutoGen.Gemini.Tests/GeminiMessageTests.cs deleted file mode 100644 index de9fa0b103..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/GeminiMessageTests.cs +++ /dev/null @@ -1,385 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GeminiMessageTests.cs - -using AutoGen.Core; -using AutoGen.Tests; -using FluentAssertions; -using Google.Cloud.AIPlatform.V1; -using Xunit; - -namespace AutoGen.Gemini.Tests; - -public class GeminiMessageTests -{ - [Fact] - public async Task ItProcessUserTextMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(1); - message.Content.Role.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - // when from is null and role is user - await agent.SendAsync("Hello"); - - // when from is user and role is user - var userMessage = new TextMessage(Role.User, "Hello", from: "user"); - await agent.SendAsync(userMessage); - - // when from is user but role is assistant - userMessage = new TextMessage(Role.Assistant, "Hello", from: "user"); - await agent.SendAsync(userMessage); - } - - [Fact] - public async Task ItProcessAssistantTextMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(1); - message.Content.Role.Should().Be("model"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - // when from is user and role is assistant - var message = new TextMessage(Role.User, "Hello", from: agent.Name); - await agent.SendAsync(message); - - // when from is assistant and role is assistant - message = new TextMessage(Role.Assistant, "Hello", from: agent.Name); - await agent.SendAsync(message); - } - - [Fact] - public async 
Task ItProcessSystemTextMessageAsUserMessageWhenStrictModeIsFalseAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(1); - message.Content.Role.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var message = new TextMessage(Role.System, "Hello", from: agent.Name); - await agent.SendAsync(message); - } - - [Fact] - public async Task ItThrowExceptionOnSystemMessageWhenStrictModeIsTrueAsync() - { - var messageConnector = new GeminiMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(messageConnector); - - var message = new TextMessage(Role.System, "Hello", from: agent.Name); - var action = new Func(async () => await agent.SendAsync(message)); - await action.Should().ThrowAsync(); - } - - [Fact] - public async Task ItProcessUserImageMessageAsInlineDataAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(1); - message.Content.Role.Should().Be("user"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.InlineData); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var imagePath = Path.Combine("testData", "images", "background.png"); - var image = File.ReadAllBytes(imagePath); - var message = new ImageMessage(Role.User, BinaryData.FromBytes(image, "image/png")); - message.MimeType.Should().Be("image/png"); - - await agent.SendAsync(message); - } - - [Fact] - public async Task ItProcessUserImageMessageAsFileDataAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(1); - message.Content.Role.Should().Be("user"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.FileData); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var imagePath = Path.Combine("testData", "images", "image.png"); - var url = new Uri(Path.GetFullPath(imagePath)).AbsoluteUri; - var message = new ImageMessage(Role.User, url); - message.MimeType.Should().Be("image/png"); - - await agent.SendAsync(message); - } - - [Fact] - public async Task ItProcessMultiModalMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Parts.Count.Should().Be(2); - message.Content.Role.Should().Be("user"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.Text); - 
message.Content.Parts.Last().DataCase.Should().Be(Part.DataOneofCase.FileData); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var imagePath = Path.Combine("testData", "images", "image.png"); - var url = new Uri(Path.GetFullPath(imagePath)).AbsoluteUri; - var message = new ImageMessage(Role.User, url); - message.MimeType.Should().Be("image/png"); - var textMessage = new TextMessage(Role.User, "What's in this image?"); - var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, message]); - - await agent.SendAsync(multiModalMessage); - } - - [Fact] - public async Task ItProcessToolCallMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Role.Should().Be("model"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.FunctionCall); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var toolCallMessage = new ToolCallMessage("test", "{}", "user"); - await agent.SendAsync(toolCallMessage); - } - - [Fact] - public async Task ItProcessStreamingTextMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterStreamingMiddleware(messageConnector); - - var messageChunks = Enumerable.Range(0, 10) - .Select(i => new GenerateContentResponse() - { - Candidates = - { - new Candidate() - { - Content = new Content() - { - Role = "user", - Parts = { new Part { Text = i.ToString() } }, - } - } - } - }) - .Select(m => MessageEnvelope.Create(m)); - - IMessage? 
finalReply = null; - await foreach (var reply in agent.GenerateStreamingReplyAsync(messageChunks)) - { - reply.Should().BeAssignableTo(); - finalReply = reply; - } - - finalReply.Should().BeOfType(); - var textMessage = (TextMessage)finalReply!; - textMessage.GetContent().Should().Be("0123456789"); - } - - [Fact] - public async Task ItProcessToolCallResultMessageAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Role.Should().Be("function"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.FunctionResponse); - message.Content.Parts.First().FunctionResponse.Response.ToString().Should().Be("{ \"result\": \"result\" }"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var message = new ToolCallResultMessage("result", "test", "{}", "user"); - await agent.SendAsync(message); - - // when the result is already a json object string - message = new ToolCallResultMessage("{ \"result\": \"result\" }", "test", "{}", "user"); - await agent.SendAsync(message); - } - - [Fact] - public async Task ItProcessToolCallAggregateMessageAsTextContentAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Role.Should().Be("user"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.Text); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - var toolCallMessage = new ToolCallMessage("test", "{}", "user"); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "{}", "user"); - var message = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, from: "user"); - await agent.SendAsync(message); - } - - [Fact] - public async Task ItProcessToolCallAggregateMessageAsFunctionContentAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(2); - var functionCallMessage = msgs.First(); - functionCallMessage.Should().BeOfType>(); - var message = (IMessage)functionCallMessage; - message.Content.Role.Should().Be("model"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.FunctionCall); - - var functionResultMessage = msgs.Last(); - functionResultMessage.Should().BeOfType>(); - message = (IMessage)functionResultMessage; - message.Content.Role.Should().Be("function"); - message.Content.Parts.First().DataCase.Should().Be(Part.DataOneofCase.FunctionResponse); - - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - var toolCallMessage = new ToolCallMessage("test", "{}", agent.Name); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "{}", agent.Name); - var message = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, from: agent.Name); - await agent.SendAsync(message); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingUnknownMessageTypeInStrictModeAsync() - { 
- var messageConnector = new GeminiMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(messageConnector); - - var unknownMessage = new - { - text = "Hello", - }; - - var message = MessageEnvelope.Create(unknownMessage, from: agent.Name); - var action = new Func(async () => await agent.SendAsync(message)); - - await action.Should().ThrowAsync(); - } - - [Fact] - public async Task ItReturnUnknownMessageTypeInNonStrictModeAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - var message = msgs.First(); - message.Should().BeAssignableTo(); - return message; - }) - .RegisterMiddleware(messageConnector); - - var unknownMessage = new - { - text = "Hello", - }; - - var message = MessageEnvelope.Create(unknownMessage, from: agent.Name); - await agent.SendAsync(message); - } - - [Fact] - public async Task ItShortcircuitContentTypeAsync() - { - var messageConnector = new GeminiMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - var message = msgs.First(); - message.Should().BeOfType>(); - - return message; - }) - .RegisterMiddleware(messageConnector); - - var message = new Content() - { - Parts = { new Part { Text = "Hello" } }, - Role = "user", - }; - - await agent.SendAsync(MessageEnvelope.Create(message, from: agent.Name)); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/GoogleGeminiClientTests.cs b/dotnet/test/AutoGen.Gemini.Tests/GoogleGeminiClientTests.cs deleted file mode 100644 index 4f6f934ded..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/GoogleGeminiClientTests.cs +++ /dev/null @@ -1,138 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GoogleGeminiClientTests.cs - -using AutoGen.Tests; -using FluentAssertions; -using Google.Cloud.AIPlatform.V1; -using Google.Protobuf; -using static Google.Cloud.AIPlatform.V1.Candidate.Types; - -namespace AutoGen.Gemini.Tests; - -public class GoogleGeminiClientTests -{ - [ApiKeyFact("GOOGLE_GEMINI_API_KEY")] - public async Task ItGenerateContentAsync() - { - var apiKey = Environment.GetEnvironmentVariable("GOOGLE_GEMINI_API_KEY") ?? throw new InvalidOperationException("GOOGLE_GEMINI_API_KEY is not set"); - var client = new GoogleGeminiClient(apiKey); - var model = "gemini-1.5-flash-001"; - - var text = "Write a long, tedious story"; - var request = new GenerateContentRequest - { - Model = model, - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - } - } - } - } - }; - var completion = await client.GenerateContentAsync(request); - - completion.Should().NotBeNull(); - completion.Candidates.Count.Should().BeGreaterThan(0); - completion.Candidates[0].Content.Parts[0].Text.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("GOOGLE_GEMINI_API_KEY")] - public async Task ItGenerateContentWithImageAsync() - { - var apiKey = Environment.GetEnvironmentVariable("GOOGLE_GEMINI_API_KEY") ?? 
throw new InvalidOperationException("GOOGLE_GEMINI_API_KEY is not set"); - var client = new GoogleGeminiClient(apiKey); - var model = "gemini-1.5-flash-001"; - - var text = "what's in the image"; - var imagePath = Path.Combine("testData", "images", "background.png"); - var image = File.ReadAllBytes(imagePath); - var request = new GenerateContentRequest - { - Model = model, - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - }, - new Part - { - InlineData = new () - { - MimeType = "image/png", - Data = ByteString.CopyFrom(image), - }, - } - } - } - } - }; - - var completion = await client.GenerateContentAsync(request); - completion.Should().NotBeNull(); - completion.Candidates.Count.Should().BeGreaterThan(0); - completion.Candidates[0].Content.Parts[0].Text.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("GOOGLE_GEMINI_API_KEY")] - public async Task ItStreamingGenerateContentTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("GOOGLE_GEMINI_API_KEY") ?? throw new InvalidOperationException("GOOGLE_GEMINI_API_KEY is not set"); - var client = new GoogleGeminiClient(apiKey); - var model = "gemini-1.5-flash-001"; - - var text = "Tell me a long tedious joke"; - var request = new GenerateContentRequest - { - Model = model, - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - } - } - } - } - }; - - var response = client.GenerateContentStreamAsync(request); - var chunks = new List(); - GenerateContentResponse? final = null; - await foreach (var item in response) - { - item.Candidates.Count.Should().BeGreaterThan(0); - final = item; - chunks.Add(final); - } - - chunks.Should().NotBeEmpty(); - final.Should().NotBeNull(); - final!.UsageMetadata.CandidatesTokenCount.Should().BeGreaterThan(0); - final!.Candidates[0].FinishReason.Should().Be(FinishReason.Stop); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/SampleTests.cs b/dotnet/test/AutoGen.Gemini.Tests/SampleTests.cs deleted file mode 100644 index 441a2b0b3d..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/SampleTests.cs +++ /dev/null @@ -1,34 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// SampleTests.cs - -using AutoGen.Gemini.Sample; -using AutoGen.Tests; - -namespace AutoGen.Gemini.Tests; - -public class SampleTests -{ - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task TestChatWithVertexGeminiAsync() - { - await Chat_With_Vertex_Gemini.RunAsync(); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task TestFunctionCallWithGeminiAsync() - { - await Function_Call_With_Gemini.RunAsync(); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task TestImageChatWithVertexGeminiAsync() - { - await Image_Chat_With_Vertex_Gemini.RunAsync(); - } -} diff --git a/dotnet/test/AutoGen.Gemini.Tests/VertexGeminiClientTests.cs b/dotnet/test/AutoGen.Gemini.Tests/VertexGeminiClientTests.cs deleted file mode 100644 index 0649567065..0000000000 --- a/dotnet/test/AutoGen.Gemini.Tests/VertexGeminiClientTests.cs +++ /dev/null @@ -1,140 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// VertexGeminiClientTests.cs - -using AutoGen.Tests; -using FluentAssertions; -using Google.Cloud.AIPlatform.V1; -using Google.Protobuf; -using static Google.Cloud.AIPlatform.V1.Candidate.Types; -namespace AutoGen.Gemini.Tests; - -public class VertexGeminiClientTests -{ - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task ItGenerateContentAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - var client = new VertexGeminiClient(location); - var model = "gemini-1.5-flash-001"; - - var text = "Hello"; - var request = new GenerateContentRequest - { - Model = $"projects/{project}/locations/{location}/publishers/google/models/{model}", - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - } - } - } - } - }; - var completion = await client.GenerateContentAsync(request); - - completion.Should().NotBeNull(); - completion.Candidates.Count.Should().BeGreaterThan(0); - completion.Candidates[0].Content.Parts[0].Text.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task ItGenerateContentWithImageAsync() - { - var location = "us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - var client = new VertexGeminiClient(location); - var model = "gemini-1.5-flash-001"; - - var text = "what's in the image"; - var imagePath = Path.Combine("testData", "images", "square.png"); - var image = File.ReadAllBytes(imagePath); - var request = new GenerateContentRequest - { - Model = $"projects/{project}/locations/{location}/publishers/google/models/{model}", - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - }, - new Part - { - InlineData = new () - { - MimeType = "image/png", - Data = ByteString.CopyFrom(image), - }, - } - } - } - } - }; - - var completion = await client.GenerateContentAsync(request); - completion.Should().NotBeNull(); - completion.Candidates.Count.Should().BeGreaterThan(0); - completion.Candidates[0].Content.Parts[0].Text.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("GCP_VERTEX_PROJECT_ID")] - public async Task ItStreamingGenerateContentTestAsync() - { - var location = 
"us-central1"; - var project = Environment.GetEnvironmentVariable("GCP_VERTEX_PROJECT_ID"); - var client = new VertexGeminiClient(location); - var model = "gemini-1.5-flash-001"; - - var text = "Hello, write a long tedious joke"; - var request = new GenerateContentRequest - { - Model = $"projects/{project}/locations/{location}/publishers/google/models/{model}", - Contents = - { - new Content - { - Role = "user", - Parts = - { - new Part - { - Text = text, - } - } - } - } - }; - - var response = client.GenerateContentStreamAsync(request); - var chunks = new List(); - GenerateContentResponse? final = null; - await foreach (var item in response) - { - item.Candidates.Count.Should().BeGreaterThan(0); - final = item; - chunks.Add(final); - } - - chunks.Should().NotBeEmpty(); - final.Should().NotBeNull(); - final!.UsageMetadata.CandidatesTokenCount.Should().BeGreaterThan(0); - final!.Candidates[0].FinishReason.Should().Be(FinishReason.Stop); - } -} diff --git a/dotnet/test/AutoGen.Mistral.Tests/AutoGen.Mistral.Tests.csproj b/dotnet/test/AutoGen.Mistral.Tests/AutoGen.Mistral.Tests.csproj deleted file mode 100644 index aa20a835e9..0000000000 --- a/dotnet/test/AutoGen.Mistral.Tests/AutoGen.Mistral.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - false - True - True - - - - - - - - - - diff --git a/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs b/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs deleted file mode 100644 index ddd9a76594..0000000000 --- a/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs +++ /dev/null @@ -1,247 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralClientAgentTests.cs - -using System.Text.Json; -using AutoGen.Core; -using AutoGen.Mistral.Extension; -using AutoGen.Tests; -using FluentAssertions; -using Xunit.Abstractions; - -namespace AutoGen.Mistral.Tests; - -public partial class MistralClientAgentTests -{ - private ITestOutputHelper _output; - - public MistralClientAgentTests(ITestOutputHelper output) - { - _output = output; - } - - [Function] - public async Task GetWeather(string city) - { - return $"The weather in {city} is sunny."; - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentChatCompletionTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "open-mistral-7b") - .RegisterMessageConnector(); - var singleAgentTest = new SingleAgentTest(_output); - await singleAgentTest.UpperCaseTestAsync(agent); - await singleAgentTest.UpperCaseStreamingTestAsync(agent); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentJsonModeTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? 
throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - jsonOutput: true, - systemMessage: "You are a helpful assistant that convert input to json object", - model: "open-mistral-7b", - randomSeed: 0) - .RegisterMessageConnector(); - - var reply = await agent.SendAsync("name: John, age: 41, email: g123456@gmail.com"); - reply.Should().BeOfType(); - reply.GetContent().Should().NotBeNullOrEmpty(); - reply.From.Should().Be(agent.Name); - var json = reply.GetContent(); - var person = JsonSerializer.Deserialize(json!); - - person.Should().NotBeNull(); - person!.Name.Should().Be("John"); - person!.Age.Should().Be(41); - person!.Email.Should().Be("g123456@gmail.com"); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentFunctionCallMessageTest() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "mistral-small-latest", - randomSeed: 0) - .RegisterMessageConnector(); - - var weatherFunctionArguments = """ - { - "city": "Seattle" - } - """; - var functionCallResult = await this.GetWeatherWrapper(weatherFunctionArguments); - var toolCall = new ToolCall(this.GetWeatherFunctionContract.Name!, weatherFunctionArguments) - { - ToolCallId = "012345678", // Mistral AI requires the tool call id to be a length of 9 - Result = functionCallResult, - }; - IMessage[] chatHistory = [ - new TextMessage(Role.User, "what's the weather in Seattle?"), - new ToolCallMessage([toolCall], from: agent.Name), - new ToolCallResultMessage([toolCall], weatherFunctionArguments), - ]; - - var reply = await agent.SendAsync(chatHistory: chatHistory); - - reply.Should().BeOfType(); - reply.GetContent().Should().Be("The weather in Seattle is sunny."); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentTwoAgentFunctionCallTest() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - var twoAgentTest = new TwoAgentTest(_output); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [twoAgentTest.GetWeatherFunctionContract]); - var functionCallAgent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "mistral-small-latest", - randomSeed: 0) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - - var functionCallMiddlewareExecutorMiddleware = new FunctionCallMiddleware( - functionMap: new Dictionary>> - { - { twoAgentTest.GetWeatherFunctionContract.Name!, twoAgentTest.GetWeatherWrapper } - }); - var executorAgent = new MistralClientAgent( - client: client, - name: "ExecutorAgent", - model: "mistral-small-latest", - randomSeed: 0) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddlewareExecutorMiddleware); - await twoAgentTest.TwoAgentGetWeatherFunctionCallTestAsync(executorAgent, functionCallAgent); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentFunctionCallMiddlewareMessageTest() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? 
throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.GetWeatherFunctionContract], - functionMap: new Dictionary>> - { - { this.GetWeatherFunctionContract.Name!, this.GetWeatherWrapper } - }); - var functionCallAgent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "mistral-small-latest", - randomSeed: 0) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = new TextMessage(Role.User, "what's the weather in Seattle?"); - var reply = await functionCallAgent.SendAsync(question); - reply.Should().BeOfType(); - - // resend the reply to the same agent so it can generate the final response - // because the reply's from is the agent's name - // in this case, the aggregate message will be converted to tool call message + tool call result message - var finalReply = await functionCallAgent.SendAsync(chatHistory: [question, reply]); - finalReply.Should().BeOfType(); - finalReply.GetContent().Should().Be("The weather in Seattle is sunny."); - - var anotherAgent = new MistralClientAgent( - client: client, - name: "AnotherMistralClientAgent", - model: "mistral-small-latest", - randomSeed: 0) - .RegisterMessageConnector(); - - // if send the reply to another agent with different name, - // the reply will be interpreted as a plain text message - var plainTextReply = await anotherAgent.SendAsync(chatHistory: [reply, question]); - plainTextReply.Should().BeOfType(); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentFunctionCallAutoInvokeTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - var singleAgentTest = new SingleAgentTest(_output); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [singleAgentTest.EchoAsyncFunctionContract], - functionMap: new Dictionary>> - { - { singleAgentTest.EchoAsyncFunctionContract.Name!, singleAgentTest.EchoAsyncWrapper } - }); - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "mistral-small-latest", - toolChoice: ToolChoiceEnum.Any, - randomSeed: 0) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - await singleAgentTest.EchoFunctionCallExecutionTestAsync(agent); - await singleAgentTest.EchoFunctionCallExecutionStreamingTestAsync(agent); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralAgentFunctionCallTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? 
throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - var singleAgentTest = new SingleAgentTest(_output); - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [singleAgentTest.EchoAsyncFunctionContract, this.GetWeatherFunctionContract]); - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "mistral-small-latest", - toolChoice: ToolChoiceEnum.Any, - systemMessage: "You are a helpful assistant that can call functions", - randomSeed: 0) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware); - await singleAgentTest.EchoFunctionCallTestAsync(agent); - - - // streaming test - var question = new TextMessage(Role.User, "what's the weather in Seattle?"); - IMessage? finalReply = null; - - await foreach (var reply in agent.GenerateStreamingReplyAsync([question])) - { - reply.From.Should().Be(agent.Name); - if (reply is IMessage message) - { - finalReply = message; - } - } - - finalReply.Should().NotBeNull(); - finalReply.Should().BeOfType(); - } -} diff --git a/dotnet/test/AutoGen.Mistral.Tests/MistralClientTests.cs b/dotnet/test/AutoGen.Mistral.Tests/MistralClientTests.cs deleted file mode 100644 index 34425b3441..0000000000 --- a/dotnet/test/AutoGen.Mistral.Tests/MistralClientTests.cs +++ /dev/null @@ -1,293 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MistralClientTests.cs - -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Core; -using AutoGen.Mistral.Extension; -using AutoGen.Tests; -using FluentAssertions; - -namespace AutoGen.Mistral.Tests; - -public partial class MistralClientTests -{ - [Function] - public async Task GetWeather(string city) - { - return $"The weather in {city} is sunny."; - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientChatCompletionTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant."); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "What is the weather like today?"); - - var request = new ChatCompletionRequest( - model: "open-mistral-7b", - messages: new List { systemMessage, userMessage }, - temperature: 0); - - var response = await client.CreateChatCompletionsAsync(request); - - response.Choices!.Count().Should().Be(1); - response.Choices!.First().Message!.Content.Should().NotBeNullOrEmpty(); - response.Choices!.First().Message!.Role.Should().Be(ChatMessage.RoleEnum.Assistant); - response.Usage!.TotalTokens.Should().BeGreaterThan(0); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientStreamingChatCompletionTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? 
throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant."); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "What is the weather like today?"); - - var request = new ChatCompletionRequest( - model: "open-mistral-7b", - messages: new List { systemMessage, userMessage }, - temperature: 0); - - var response = client.StreamingChatCompletionsAsync(request); - var results = new List(); - - await foreach (var item in response) - { - results.Add(item); - item.VarObject.Should().Be("chat.completion.chunk"); - } - - results.Count.Should().BeGreaterThan(0); - - // merge result - var finalResult = results.First(); - foreach (var result in results) - { - if (finalResult.Choices!.First().Message is null) - { - finalResult.Choices!.First().Message = result.Choices!.First().Delta; - } - else - { - finalResult.Choices!.First().Message!.Content += result.Choices!.First().Delta!.Content; - } - - // the usage information will be included in the last result - if (result.Usage != null) - { - finalResult.Usage = result.Usage; - } - } - finalResult.Choices!.First().Message!.Content.Should().NotBeNullOrEmpty(); - finalResult.Choices!.First().Message!.Role.Should().Be(ChatMessage.RoleEnum.Assistant); - finalResult.Usage!.TotalTokens.Should().BeGreaterThan(0); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientStreamingChatJsonModeCompletionTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant that convert input to json object"); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "name: John, age: 41, email: g123456@gmail.com"); - - var request = new ChatCompletionRequest( - model: "open-mistral-7b", - messages: new List { systemMessage, userMessage }, - temperature: 0) - { - ResponseFormat = new ResponseFormat { ResponseFormatType = "json_object" }, - }; - - var response = client.StreamingChatCompletionsAsync(request); - var results = new List(); - - await foreach (var item in response) - { - results.Add(item); - item.VarObject.Should().Be("chat.completion.chunk"); - } - - results.Count.Should().BeGreaterThan(0); - - // merge result - var finalResult = results.First(); - foreach (var result in results) - { - if (finalResult.Choices!.First().Message is null) - { - finalResult.Choices!.First().Message = result.Choices!.First().Delta; - } - else - { - finalResult.Choices!.First().Message!.Content += result.Choices!.First().Delta!.Content; - } - - // the usage information will be included in the last result - if (result.Usage != null) - { - finalResult.Usage = result.Usage; - } - } - - finalResult.Choices!.First().Message!.Content.Should().NotBeNullOrEmpty(); - finalResult.Choices!.First().Message!.Role.Should().Be(ChatMessage.RoleEnum.Assistant); - finalResult.Usage!.TotalTokens.Should().BeGreaterThan(0); - var responseContent = finalResult.Choices!.First().Message!.Content ?? 
throw new InvalidOperationException("Response content is null."); - var person = JsonSerializer.Deserialize(responseContent); - person.Should().NotBeNull(); - - person!.Name.Should().Be("John"); - person!.Age.Should().Be(41); - person!.Email.Should().Be("g123456@gmail.com"); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientJsonModeTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant that convert input to json object"); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "name: John, age: 41, email: g123456@gmail.com"); - - var request = new ChatCompletionRequest( - model: "open-mistral-7b", - messages: new List { systemMessage, userMessage }, - temperature: 0) - { - ResponseFormat = new ResponseFormat { ResponseFormatType = "json_object" }, - }; - - var response = await client.CreateChatCompletionsAsync(request); - - response.Choices!.Count().Should().Be(1); - response.Choices!.First().Message!.Content.Should().NotBeNullOrEmpty(); - response.Choices!.First().Message!.Role.Should().Be(ChatMessage.RoleEnum.Assistant); - response.Usage!.TotalTokens.Should().BeGreaterThan(0); - - // check if the response is a valid json object - var responseContent = response.Choices!.First().Message!.Content ?? throw new InvalidOperationException("Response content is null."); - var person = JsonSerializer.Deserialize(responseContent); - person.Should().NotBeNull(); - - person!.Name.Should().Be("John"); - person!.Age.Should().Be(41); - person!.Email.Should().Be("g123456@gmail.com"); - } - - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientFunctionCallTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - using var client = new MistralClient(apiKey: apiKey); - - var getWeatherFunctionContract = this.GetWeatherFunctionContract; - var functionDefinition = getWeatherFunctionContract.ToMistralFunctionDefinition(); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant."); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "What is the weather in Seattle?"); - - var request = new ChatCompletionRequest( - model: "mistral-small-latest", // only large or small latest models support function calls - messages: new List { systemMessage, userMessage }, - temperature: 0) - { - Tools = [new FunctionTool(functionDefinition)], - ToolChoice = ToolChoiceEnum.Any, - }; - - var response = await client.CreateChatCompletionsAsync(request); - - response.Choices!.Count().Should().Be(1); - response.Choices!.First().Message!.Content.Should().BeNullOrEmpty(); - response.Choices!.First().FinishReason.Should().Be(Choice.FinishReasonEnum.ToolCalls); - response.Choices!.First().Message!.ToolCalls!.Count.Should().Be(1); - response.Choices!.First().Message!.ToolCalls!.First().Function.Name.Should().Be("GetWeather"); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientStreamingFunctionCallTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? 
throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - using var client = new MistralClient(apiKey: apiKey); - - var getWeatherFunctionContract = this.GetWeatherFunctionContract; - var functionDefinition = getWeatherFunctionContract.ToMistralFunctionDefinition(); - - var systemMessage = new ChatMessage(ChatMessage.RoleEnum.System, "You are a helpful assistant."); - var userMessage = new ChatMessage(ChatMessage.RoleEnum.User, "What is the weather in Seattle?"); - - var request = new ChatCompletionRequest( - model: "mistral-small-latest", - messages: new List { systemMessage, userMessage }, - temperature: 0) - { - Tools = [new FunctionTool(functionDefinition)], - ToolChoice = ToolChoiceEnum.Any, - }; - - var response = client.StreamingChatCompletionsAsync(request); - - var results = new List(); - await foreach (var item in response) - { - results.Add(item); - item.VarObject.Should().Be("chat.completion.chunk"); - } - - // merge result - var finalResult = results.First(); - var lastResult = results.Last(); - lastResult.Choices!.First().FinishReason.Should().Be(Choice.FinishReasonEnum.ToolCalls); - - foreach (var result in results) - { - if (finalResult.Choices!.First().Message is null) - { - finalResult.Choices!.First().Message = result.Choices!.First().Delta; - finalResult.Choices!.First().Message!.ToolCalls = []; - } - else - { - finalResult.Choices!.First().Message!.ToolCalls = finalResult.Choices!.First().Message!.ToolCalls!.Concat(result.Choices!.First().Delta!.ToolCalls!).ToList(); - } - - // the usage information will be included in the last result - if (result.Usage != null) - { - finalResult.Usage = result.Usage; - } - } - - finalResult.Choices!.First().Message!.Content.Should().BeNullOrEmpty(); - finalResult.Choices!.First().Message!.ToolCalls!.Count.Should().BeGreaterThan(0); - finalResult.Usage!.TotalTokens.Should().BeGreaterThan(0); - finalResult.Choices!.First().Message!.ToolCalls!.First().Function.Name.Should().Be("GetWeather"); - } -} -public class Person -{ - [JsonPropertyName("name")] - public string Name { get; set; } = string.Empty; - - [JsonPropertyName("age")] - public int Age { get; set; } - - [JsonPropertyName("email")] - public string Email { get; set; } = string.Empty; -} diff --git a/dotnet/test/AutoGen.Ollama.Tests/AutoGen.Ollama.Tests.csproj b/dotnet/test/AutoGen.Ollama.Tests/AutoGen.Ollama.Tests.csproj deleted file mode 100644 index c5ca195562..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/AutoGen.Ollama.Tests.csproj +++ /dev/null @@ -1,25 +0,0 @@ - - - - $(TestTargetFrameworks) - enable - false - True - True - - - - - - - - - - PreserveNewest - - - PreserveNewest - - - - diff --git a/dotnet/test/AutoGen.Ollama.Tests/OllamaAgentTests.cs b/dotnet/test/AutoGen.Ollama.Tests/OllamaAgentTests.cs deleted file mode 100644 index efcb015dda..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/OllamaAgentTests.cs +++ /dev/null @@ -1,230 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// OllamaAgentTests.cs - -using System.Text.Json; -using AutoGen.Core; -using AutoGen.Ollama.Extension; -using AutoGen.Tests; -using FluentAssertions; - -namespace AutoGen.Ollama.Tests; - -public class OllamaAgentTests -{ - [ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")] - public async Task GenerateReplyAsync_ReturnsValidMessage_WhenCalled() - { - string host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME") - ?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set."); - OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName); - - var message = new Message("user", "hey how are you"); - var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) }; - IMessage result = await ollamaAgent.GenerateReplyAsync(messages); - - result.Should().NotBeNull(); - result.Should().BeOfType>(); - result.From.Should().Be(ollamaAgent.Name); - } - - [ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")] - public async Task GenerateReplyAsync_ReturnsValidJsonMessageContent_WhenCalled() - { - string host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME") - ?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set."); - OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName); - - var message = new Message("user", "What color is the sky at different times of the day? Respond using JSON"); - var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) }; - IMessage result = await ollamaAgent.GenerateReplyAsync(messages, new OllamaReplyOptions - { - Format = FormatType.Json - }); - - result.Should().NotBeNull(); - result.Should().BeOfType>(); - result.From.Should().Be(ollamaAgent.Name); - - string jsonContent = ((MessageEnvelope)result).Content.Message!.Value; - bool isValidJson = IsValidJsonMessage(jsonContent); - isValidJson.Should().BeTrue(); - } - - [ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")] - public async Task GenerateStreamingReplyAsync_ReturnsValidMessages_WhenCalled() - { - string host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME") - ?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set."); - OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName); - - var msg = new Message("user", "hey how are you"); - var messages = new IMessage[] { MessageEnvelope.Create(msg, from: modelName) }; - IMessage? finalReply = default; - await foreach (IMessage message in ollamaAgent.GenerateStreamingReplyAsync(messages)) - { - message.Should().NotBeNull(); - message.From.Should().Be(ollamaAgent.Name); - var streamingMessage = (IMessage)message; - if (streamingMessage.Content.Done) - { - finalReply = message; - break; - } - else - { - streamingMessage.Content.Message.Should().NotBeNull(); - streamingMessage.Content.Done.Should().BeFalse(); - } - } - - finalReply.Should().BeOfType>(); - var update = ((MessageEnvelope)finalReply!).Content; - update.Done.Should().BeTrue(); - update.TotalDuration.Should().BeGreaterThan(0); - } - - [ApiKeyFact("OLLAMA_HOST")] - public async Task ItReturnsValidMessageUsingLLavaAsync() - { - var host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? 
throw new InvalidOperationException("OLLAMA_HOST is not set."); - var modelName = "llava:latest"; - var ollamaAgent = BuildOllamaAgent(host, modelName); - var imagePath = Path.Combine("images", "image.png"); - var base64Image = Convert.ToBase64String(File.ReadAllBytes(imagePath)); - var message = new Message() - { - Role = "user", - Value = "What's the color of the background in this image", - Images = [base64Image], - }; - - var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) }; - var reply = await ollamaAgent.GenerateReplyAsync(messages); - - reply.Should().BeOfType>(); - var chatResponse = ((MessageEnvelope)reply).Content; - chatResponse.Message.Should().NotBeNull(); - } - - [ApiKeyFact("OLLAMA_HOST")] - public async Task ItCanProcessMultiModalMessageUsingLLavaAsync() - { - var host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - var modelName = "llava:latest"; - var ollamaAgent = BuildOllamaAgent(host, modelName) - .RegisterMessageConnector(); - var image = Path.Combine("images", "image.png"); - var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png"); - var imageMessage = new ImageMessage(Role.User, binaryData); - var textMessage = new TextMessage(Role.User, "What's in this image?"); - var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]); - - var reply = await ollamaAgent.SendAsync(multiModalMessage); - reply.Should().BeOfType(); - reply.GetRole().Should().Be(Role.Assistant); - reply.GetContent().Should().NotBeNullOrEmpty(); - reply.From.Should().Be(ollamaAgent.Name); - } - - [ApiKeyFact("OLLAMA_HOST")] - public async Task ItCanProcessImageMessageUsingLLavaAsync() - { - var host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - var modelName = "llava:latest"; - var ollamaAgent = BuildOllamaAgent(host, modelName) - .RegisterMessageConnector(); - var image = Path.Combine("images", "image.png"); - var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png"); - var imageMessage = new ImageMessage(Role.User, binaryData); - - var reply = await ollamaAgent.SendAsync(imageMessage); - reply.Should().BeOfType(); - reply.GetRole().Should().Be(Role.Assistant); - reply.GetContent().Should().NotBeNullOrEmpty(); - reply.From.Should().Be(ollamaAgent.Name); - } - - [ApiKeyFact("OLLAMA_HOST")] - public async Task ItReturnsValidStreamingMessageUsingLLavaAsync() - { - var host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - var modelName = "llava:latest"; - var ollamaAgent = BuildOllamaAgent(host, modelName); - var squareImagePath = Path.Combine("images", "square.png"); - var base64Image = Convert.ToBase64String(File.ReadAllBytes(squareImagePath)); - var imageMessage = new Message() - { - Role = "user", - Value = "What's in this image?", - Images = [base64Image], - }; - - var messages = new IMessage[] { MessageEnvelope.Create(imageMessage, from: modelName) }; - - IMessage? 
finalReply = default; - await foreach (IMessage message in ollamaAgent.GenerateStreamingReplyAsync(messages)) - { - message.Should().NotBeNull(); - message.From.Should().Be(ollamaAgent.Name); - var streamingMessage = (IMessage)message; - if (streamingMessage.Content.Done) - { - finalReply = message; - break; - } - else - { - streamingMessage.Content.Message.Should().NotBeNull(); - streamingMessage.Content.Done.Should().BeFalse(); - } - } - - finalReply.Should().BeOfType>(); - var update = ((MessageEnvelope)finalReply!).Content; - update.Done.Should().BeTrue(); - update.TotalDuration.Should().BeGreaterThan(0); - } - - private static bool IsValidJsonMessage(string input) - { - try - { - JsonDocument.Parse(input); - return true; - } - catch (JsonException) - { - return false; - } - catch (Exception ex) - { - Console.WriteLine("An unexpected exception occurred: " + ex.Message); - return false; - } - } - - private static OllamaAgent BuildOllamaAgent(string host, string modelName) - { - var httpClient = new HttpClient - { - BaseAddress = new Uri(host) - }; - return new OllamaAgent(httpClient, "TestAgent", modelName); - } -} diff --git a/dotnet/test/AutoGen.Ollama.Tests/OllamaMessageTests.cs b/dotnet/test/AutoGen.Ollama.Tests/OllamaMessageTests.cs deleted file mode 100644 index e0316c5fe3..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/OllamaMessageTests.cs +++ /dev/null @@ -1,182 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaMessageTests.cs - -using AutoGen.Core; -using AutoGen.Tests; -using FluentAssertions; -using Xunit; -namespace AutoGen.Ollama.Tests; - -public class OllamaMessageTests -{ - [Fact] - public async Task ItProcessUserTextMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Value.Should().Be("Hello"); - message.Content.Images.Should().BeNullOrEmpty(); - message.Content.Role.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - // when from is null and role is user - await agent.SendAsync("Hello"); - - // when from is user and role is user - var userMessage = new TextMessage(Role.User, "Hello", from: "user"); - await agent.SendAsync(userMessage); - - // when from is user but role is assistant - userMessage = new TextMessage(Role.Assistant, "Hello", from: "user"); - await agent.SendAsync(userMessage); - } - - [Fact] - public async Task ItProcessStreamingTextMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterStreamingMiddleware(messageConnector); - - var messageChunks = Enumerable.Range(0, 10) - .Select(i => new ChatResponseUpdate() - { - Message = new Message() - { - Value = i.ToString(), - Role = "assistant", - } - }) - .Select(m => MessageEnvelope.Create(m)); - - IMessage? 
finalReply = null; - await foreach (var reply in agent.GenerateStreamingReplyAsync(messageChunks)) - { - reply.Should().BeAssignableTo(); - finalReply = reply; - } - - finalReply.Should().BeOfType(); - var textMessage = (TextMessage)finalReply!; - textMessage.GetContent().Should().Be("0123456789"); - } - - [Fact] - public async Task ItProcessAssistantTextMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Value.Should().Be("Hello"); - message.Content.Images.Should().BeNullOrEmpty(); - message.Content.Role.Should().Be("assistant"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - // when from is null and role is assistant - var assistantMessage = new TextMessage(Role.Assistant, "Hello"); - await agent.SendAsync(assistantMessage); - - // when from is assistant and role is assistant - assistantMessage = new TextMessage(Role.Assistant, "Hello", from: "assistant"); - await agent.SendAsync(assistantMessage); - - // when from is assistant but role is user - assistantMessage = new TextMessage(Role.User, "Hello", from: "assistant"); - await agent.SendAsync(assistantMessage); - } - - [Fact] - public async Task ItProcessSystemTextMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Value.Should().Be("Hello"); - message.Content.Images.Should().BeNullOrEmpty(); - message.Content.Role.Should().Be("system"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - // when role is system - var systemMessage = new TextMessage(Role.System, "Hello"); - await agent.SendAsync(systemMessage); - } - - [Fact] - public async Task ItProcessImageMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var innerMessage = msgs.First(); - innerMessage.Should().BeOfType>(); - var message = (IMessage)innerMessage; - message.Content.Images!.Count.Should().Be(1); - message.Content.Role.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(messageConnector); - - var square = Path.Combine("images", "square.png"); - BinaryData imageBinaryData = BinaryData.FromBytes(File.ReadAllBytes(square), "image/png"); - var imageMessage = new ImageMessage(Role.User, imageBinaryData); - await agent.SendAsync(imageMessage); - } - - [Fact] - public async Task ItProcessMultiModalMessageAsync() - { - var messageConnector = new OllamaMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, ct) => - { - msgs.Count().Should().Be(1); - var message = msgs.First(); - message.Should().BeOfType>(); - - var multiModalMessage = (IMessage)message; - multiModalMessage.Content.Images!.Count.Should().Be(1); - multiModalMessage.Content.Value.Should().Be("Hello"); - - return await innerAgent.GenerateReplyAsync(msgs); - }) - 
.RegisterMiddleware(messageConnector); - - var square = Path.Combine("images", "square.png"); - BinaryData imageBinaryData = BinaryData.FromBytes(File.ReadAllBytes(square), "image/png"); - var imageMessage = new ImageMessage(Role.User, imageBinaryData); - var textMessage = new TextMessage(Role.User, "Hello"); - var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]); - - await agent.SendAsync(multiModalMessage); - } -} diff --git a/dotnet/test/AutoGen.Ollama.Tests/OllamaTextEmbeddingServiceTests.cs b/dotnet/test/AutoGen.Ollama.Tests/OllamaTextEmbeddingServiceTests.cs deleted file mode 100644 index a4f2fabb8d..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/OllamaTextEmbeddingServiceTests.cs +++ /dev/null @@ -1,33 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OllamaTextEmbeddingServiceTests.cs - -using AutoGen.Tests; -using FluentAssertions; - -namespace AutoGen.Ollama.Tests; - -public class OllamaTextEmbeddingServiceTests -{ - [ApiKeyFact("OLLAMA_HOST", "OLLAMA_EMBEDDING_MODEL_NAME")] - public async Task GenerateAsync_ReturnsEmbeddings_WhenApiResponseIsSuccessful() - { - string host = Environment.GetEnvironmentVariable("OLLAMA_HOST") - ?? throw new InvalidOperationException("OLLAMA_HOST is not set."); - string embeddingModelName = Environment.GetEnvironmentVariable("OLLAMA_EMBEDDING_MODEL_NAME") - ?? throw new InvalidOperationException("OLLAMA_EMBEDDING_MODEL_NAME is not set."); - var httpClient = new HttpClient - { - BaseAddress = new Uri(host) - }; - var request = new TextEmbeddingsRequest { Model = embeddingModelName, Prompt = "Llamas are members of the camelid family", }; - var service = new OllamaTextEmbeddingService(httpClient); - TextEmbeddingsResponse response = await service.GenerateAsync(request); - response.Should().NotBeNull(); - } -} diff --git a/dotnet/test/AutoGen.Ollama.Tests/images/image.png b/dotnet/test/AutoGen.Ollama.Tests/images/image.png deleted file mode 100644 index ca276f81f5..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/images/image.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:300b7c9d6ba0c23a3e52fbd2e268141ddcca0434a9fb9dcf7e58e7e903d36dcf -size 2126185 diff --git a/dotnet/test/AutoGen.Ollama.Tests/images/square.png b/dotnet/test/AutoGen.Ollama.Tests/images/square.png deleted file mode 100644 index afb4f4cd4d..0000000000 --- a/dotnet/test/AutoGen.Ollama.Tests/images/square.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8323d0b8eceb752e14c29543b2e28bb2fc648ed9719095c31b7708867a4dc918 -size 491 diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt b/dotnet/test/AutoGen.OpenAI.V1.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt deleted file mode 100644 index e8e9af84db..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt +++ /dev/null @@ -1,174 +0,0 @@ -[ - { - "OriginalMessage": "TextMessage(system, You are a helpful AI assistant, )", - "ConvertedMessages": [ - { - "Name": null, - "Role": "system", - "Content": 
"You are a helpful AI assistant" - } - ] - }, - { - "OriginalMessage": "TextMessage(user, Hello, user)", - "ConvertedMessages": [ - { - "Role": "user", - "Content": "Hello", - "Name": "user", - "MultiModaItem": null - } - ] - }, - { - "OriginalMessage": "TextMessage(assistant, How can I help you?, assistant)", - "ConvertedMessages": [ - { - "Role": "assistant", - "Content": "How can I help you?", - "Name": "assistant", - "TooCall": [], - "FunctionCallName": null, - "FunctionCallArguments": null - } - ] - }, - { - "OriginalMessage": "ImageMessage(user, https://example.com/image.png, user)", - "ConvertedMessages": [ - { - "Role": "user", - "Content": null, - "Name": "user", - "MultiModaItem": [ - { - "Type": "Image", - "ImageUrl": { - "Url": "https://example.com/image.png", - "Detail": null - } - } - ] - } - ] - }, - { - "OriginalMessage": "MultiModalMessage(assistant, user)\n\tTextMessage(user, Hello, user)\n\tImageMessage(user, https://example.com/image.png, user)", - "ConvertedMessages": [ - { - "Role": "user", - "Content": null, - "Name": "user", - "MultiModaItem": [ - { - "Type": "Text", - "Text": "Hello" - }, - { - "Type": "Image", - "ImageUrl": { - "Url": "https://example.com/image.png", - "Detail": null - } - } - ] - } - ] - }, - { - "OriginalMessage": "ToolCallMessage(assistant)\n\tToolCall(test, test, )", - "ConvertedMessages": [ - { - "Role": "assistant", - "Content": "", - "Name": "assistant", - "TooCall": [ - { - "Type": "Function", - "Name": "test", - "Arguments": "test", - "Id": "test" - } - ], - "FunctionCallName": null, - "FunctionCallArguments": null - } - ] - }, - { - "OriginalMessage": "ToolCallResultMessage(user)\n\tToolCall(test, test, result)", - "ConvertedMessages": [ - { - "Role": "tool", - "Content": "result", - "ToolCallId": "test" - } - ] - }, - { - "OriginalMessage": "ToolCallResultMessage(user)\n\tToolCall(result, test, test)\n\tToolCall(result, test, test)", - "ConvertedMessages": [ - { - "Role": "tool", - "Content": "test", - "ToolCallId": "result_0" - }, - { - "Role": "tool", - "Content": "test", - "ToolCallId": "result_1" - } - ] - }, - { - "OriginalMessage": "ToolCallMessage(assistant)\n\tToolCall(test, test, )\n\tToolCall(test, test, )", - "ConvertedMessages": [ - { - "Role": "assistant", - "Content": "", - "Name": "assistant", - "TooCall": [ - { - "Type": "Function", - "Name": "test", - "Arguments": "test", - "Id": "test_0" - }, - { - "Type": "Function", - "Name": "test", - "Arguments": "test", - "Id": "test_1" - } - ], - "FunctionCallName": null, - "FunctionCallArguments": null - } - ] - }, - { - "OriginalMessage": "AggregateMessage(assistant)\n\tToolCallMessage(assistant)\n\tToolCall(test, test, )\n\tToolCallResultMessage(assistant)\n\tToolCall(test, test, result)", - "ConvertedMessages": [ - { - "Role": "assistant", - "Content": "", - "Name": "assistant", - "TooCall": [ - { - "Type": "Function", - "Name": "test", - "Arguments": "test", - "Id": "test" - } - ], - "FunctionCallName": null, - "FunctionCallArguments": null - }, - { - "Role": "tool", - "Content": "result", - "ToolCallId": "test" - } - ] - } -] \ No newline at end of file diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj b/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj deleted file mode 100644 index 74d7d7b0a1..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj +++ /dev/null @@ -1,25 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - false - True - True - - - - - - - - - - - - 
$([System.String]::Copy('%(FileName)').Split('.')[0]) - $(ProjectExt.Replace('proj', '')) - %(ParentFile)%(ParentExtension) - - - - diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/GlobalUsing.cs b/dotnet/test/AutoGen.OpenAI.V1.Tests/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/MathClassTest.cs b/dotnet/test/AutoGen.OpenAI.V1.Tests/MathClassTest.cs deleted file mode 100644 index 4aa9058793..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/MathClassTest.cs +++ /dev/null @@ -1,228 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MathClassTest.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1.Extension; -using AutoGen.Tests; -using Azure.AI.OpenAI; -using FluentAssertions; -using Xunit.Abstractions; - -namespace AutoGen.OpenAI.V1.Tests -{ - public partial class MathClassTest - { - private readonly ITestOutputHelper _output; - - // as of 2024-05-20, aoai returns a 500 error when round > 1 - // I'm pretty sure that round > 5 was supported before - // So this is probably some weird regression on the aoai side - // I'll keep this test case here for now, plus setting round to 1 - // so the test can still pass. - // In the future, we should rewind this test case to round > 1 (previously was 5) - private int round = 1; - public MathClassTest(ITestOutputHelper output) - { - _output = output; - } - - private Task Print(IEnumerable messages, GenerateReplyOptions?
option, IAgent agent, CancellationToken ct) - { - try - { - var reply = agent.GenerateReplyAsync(messages, option, ct).Result; - - _output.WriteLine(reply.FormatMessage()); - return Task.FromResult(reply); - } - catch (Exception) - { - _output.WriteLine("Request failed"); - _output.WriteLine($"agent name: {agent.Name}"); - foreach (var message in messages) - { - _output.WriteLine(message.FormatMessage()); - } - - throw; - } - - } - - [FunctionAttribute] - public async Task CreateMathQuestion(string question, int question_index) - { - return $@"[MATH_QUESTION] -Question {question_index}: -{question} - -Student, please answer"; - } - - [FunctionAttribute] - public async Task AnswerQuestion(string answer) - { - return $@"[MATH_ANSWER] -The answer is {answer} -teacher please check answer"; - } - - [FunctionAttribute] - public async Task AnswerIsCorrect(string message) - { - return $@"[ANSWER_IS_CORRECT] -{message} -please update progress"; - } - - [FunctionAttribute] - public async Task UpdateProgress(int correctAnswerCount) - { - if (correctAnswerCount >= this.round) - { - return $@"[UPDATE_PROGRESS] -{GroupChatExtension.TERMINATE}"; - } - else - { - return $@"[UPDATE_PROGRESS] -the number of resolved question is {correctAnswerCount} -teacher, please create the next math question"; - } - } - - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task OpenAIAgentMathChatTestAsync() - { - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); - var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(key)); - var teacher = await CreateTeacherAgentAsync(openaiClient, deployName); - var student = await CreateStudentAssistantAgentAsync(openaiClient, deployName); - - var adminFunctionMiddleware = new FunctionCallMiddleware( - functions: [this.UpdateProgressFunctionContract], - functionMap: new Dictionary>> - { - { this.UpdateProgressFunctionContract.Name, this.UpdateProgressWrapper }, - }); - var admin = new OpenAIChatAgent( - openAIClient: openaiClient, - modelName: deployName, - name: "Admin", - systemMessage: $@"You are admin. You update progress after each question is answered.") - .RegisterMessageConnector() - .RegisterStreamingMiddleware(adminFunctionMiddleware) - .RegisterMiddleware(Print); - - var groupAdmin = new OpenAIChatAgent( - openAIClient: openaiClient, - modelName: deployName, - name: "GroupAdmin", - systemMessage: "You are group admin. 
You manage the group chat.") - .RegisterMessageConnector() - .RegisterMiddleware(Print); - await RunMathChatAsync(teacher, student, admin, groupAdmin); - } - - private async Task CreateTeacherAgentAsync(OpenAIClient client, string model) - { - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.CreateMathQuestionFunctionContract, this.AnswerIsCorrectFunctionContract], - functionMap: new Dictionary>> - { - { this.CreateMathQuestionFunctionContract.Name!, this.CreateMathQuestionWrapper }, - { this.AnswerIsCorrectFunctionContract.Name!, this.AnswerIsCorrectWrapper }, - }); - - var teacher = new OpenAIChatAgent( - openAIClient: client, - name: "Teacher", - systemMessage: @"You are a preschool math teacher. -You create math question and ask student to answer it. -Then you check if the answer is correct. -If the answer is wrong, you ask student to fix it", - modelName: model) - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware) - .RegisterMiddleware(Print); - - return teacher; - } - - private async Task CreateStudentAssistantAgentAsync(OpenAIClient client, string model) - { - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.AnswerQuestionFunctionContract], - functionMap: new Dictionary>> - { - { this.AnswerQuestionFunctionContract.Name!, this.AnswerQuestionWrapper }, - }); - var student = new OpenAIChatAgent( - openAIClient: client, - name: "Student", - modelName: model, - systemMessage: @"You are a student. You answer math question from teacher.") - .RegisterMessageConnector() - .RegisterStreamingMiddleware(functionCallMiddleware) - .RegisterMiddleware(Print); - - return student; - } - - private async Task RunMathChatAsync(IAgent teacher, IAgent student, IAgent admin, IAgent groupAdmin) - { - var teacher2Student = Transition.Create(teacher, student); - var student2Teacher = Transition.Create(student, teacher); - var teacher2Admin = Transition.Create(teacher, admin); - var admin2Teacher = Transition.Create(admin, teacher); - var workflow = new Graph( - [ - teacher2Student, - student2Teacher, - teacher2Admin, - admin2Teacher, - ]); - var group = new GroupChat( - workflow: workflow, - members: [ - admin, - teacher, - student, - ], - admin: groupAdmin); - - var groupChatManager = new GroupChatManager(group); - var chatHistory = await admin.InitiateChatAsync(groupChatManager, "teacher, create question", maxRound: 50); - - chatHistory.Where(msg => msg.From == teacher.Name && msg.GetContent()?.Contains("[MATH_QUESTION]") is true) - .Count() - .Should().BeGreaterThanOrEqualTo(this.round); - - chatHistory.Where(msg => msg.From == student.Name && msg.GetContent()?.Contains("[MATH_ANSWER]") is true) - .Count() - .Should().BeGreaterThanOrEqualTo(this.round); - - chatHistory.Where(msg => msg.From == teacher.Name && msg.GetContent()?.Contains("[ANSWER_IS_CORRECT]") is true) - .Count() - .Should().BeGreaterThanOrEqualTo(this.round); - - // check if there's terminate chat message from admin - chatHistory.Where(msg => msg.From == admin.Name && msg.IsGroupChatTerminateMessage()) - .Count() - .Should().Be(1); - } - } -} diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIChatAgentTest.cs b/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIChatAgentTest.cs deleted file mode 100644 index c7087989ba..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIChatAgentTest.cs +++ /dev/null @@ -1,285 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this 
project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatAgentTest.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1.Extension; -using AutoGen.Tests; -using Azure.AI.OpenAI; -using FluentAssertions; - -namespace AutoGen.OpenAI.V1.Tests; - -public partial class OpenAIChatAgentTest -{ - /// - /// Get the weather for a location. - /// - /// location - /// - [Function] - public async Task GetWeatherAsync(string location) - { - return $"The weather in {location} is sunny."; - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task BasicConversationTestAsync() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName); - - // By default, OpenAIChatClient supports the following message types - // - IMessage - var chatMessageContent = MessageEnvelope.Create(new ChatRequestUserMessage("Hello")); - var reply = await openAIChatAgent.SendAsync(chatMessageContent); - - reply.Should().BeOfType>(); - reply.As>().From.Should().Be("assistant"); - reply.As>().Content.Choices.First().Message.Role.Should().Be(ChatRole.Assistant); - reply.As>().Content.Usage.TotalTokens.Should().BeGreaterThan(0); - - // test streaming - var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); - - await foreach (var streamingMessage in streamingReply) - { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().From.Should().Be("assistant"); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task OpenAIChatMessageContentConnectorTestAsync() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName); - - MiddlewareStreamingAgent assistant = openAIChatAgent - .RegisterMessageConnector(); - - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage("Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await assistant.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - } - - // test streaming - foreach (var message in messages) - { - var reply = assistant.GenerateStreamingReplyAsync([message]); - - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - } - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task OpenAIChatAgentToolCallTestAsync() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName); - - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.GetWeatherAsyncFunctionContract]); - MiddlewareStreamingAgent assistant = openAIChatAgent - .RegisterMessageConnector(); - - assistant.StreamingMiddlewares.Count().Should().Be(1); - var functionCallAgent = assistant - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage(question)), - new TextMessage(Role.Assistant, question, from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, question, from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await functionCallAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - reply.As().ToolCalls.Count().Should().Be(1); - reply.As().ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - } - - // test streaming - foreach (var message in messages) - { - var reply = functionCallAgent.GenerateStreamingReplyAsync([message]); - ToolCallMessage? 
toolCallMessage = null; - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - if (toolCallMessage is null) - { - toolCallMessage = new ToolCallMessage(streamingMessage.As()); - } - else - { - toolCallMessage.Update(streamingMessage.As()); - } - } - - toolCallMessage.Should().NotBeNull(); - toolCallMessage!.From.Should().Be("assistant"); - toolCallMessage.ToolCalls.Count().Should().Be(1); - toolCallMessage.ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task OpenAIChatAgentToolCallInvokingTestAsync() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName); - - var functionCallMiddleware = new FunctionCallMiddleware( - functions: [this.GetWeatherAsyncFunctionContract], - functionMap: new Dictionary>> { { this.GetWeatherAsyncFunctionContract.Name!, this.GetWeatherAsyncWrapper } }); - MiddlewareStreamingAgent assistant = openAIChatAgent - .RegisterMessageConnector(); - - var functionCallAgent = assistant - .RegisterStreamingMiddleware(functionCallMiddleware); - - var question = "What's the weather in Seattle"; - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatRequestUserMessage(question)), - new TextMessage(Role.Assistant, question, from: "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, question, from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await functionCallAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.From.Should().Be("assistant"); - reply.GetToolCalls()!.Count().Should().Be(1); - reply.GetToolCalls()!.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); - reply.GetContent()!.ToLower().Should().Contain("seattle"); - } - - // test streaming - foreach (var message in messages) - { - var reply = functionCallAgent.GenerateStreamingReplyAsync([message]); - await foreach (var streamingMessage in reply) - { - if (streamingMessage is not IMessage) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - } - else - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().GetContent()!.ToLower().Should().Contain("seattle"); - } - } - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task ItCreateOpenAIChatAgentWithChatCompletionOptionAsync() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var options = new ChatCompletionsOptions(deployName, []) - { - Temperature = 0.7f, - MaxTokens = 1, - }; - - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - options: options) - .RegisterMessageConnector(); - - var respond = await openAIChatAgent.SendAsync("hello"); - respond.GetContent()?.Should().NotBeNullOrEmpty(); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task ItThrowExceptionWhenChatCompletionOptionContainsMessages() - { - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = CreateOpenAIClientFromAzureOpenAI(); - var options = new ChatCompletionsOptions(deployName, [new ChatRequestUserMessage("hi")]) - { - Temperature = 0.7f, - MaxTokens = 1, - }; - - var action = () => new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - options: options) - .RegisterMessageConnector(); - - action.Should().ThrowExactly().WithMessage("Messages should not be provided in options"); - } - - private OpenAIClient CreateOpenAIClientFromAzureOpenAI() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - return new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); - } -} diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIMessageTests.cs b/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIMessageTests.cs deleted file mode 100644 index e2e9be0495..0000000000 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/OpenAIMessageTests.cs +++ /dev/null @@ -1,730 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// OpenAIMessageTests.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Threading.Tasks; -using ApprovalTests; -using ApprovalTests.Namers; -using ApprovalTests.Reporters; -using AutoGen.Tests; -using Azure.AI.OpenAI; -using FluentAssertions; -using Xunit; - -namespace AutoGen.OpenAI.V1.Tests; - -public class OpenAIMessageTests -{ - private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions - { - WriteIndented = true, - IgnoreReadOnlyProperties = false, - }; - - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void BasicMessageTest() - { - IMessage[] messages = [ - new TextMessage(Role.System, "You are a helpful AI assistant"), - new TextMessage(Role.User, "Hello", "user"), - new TextMessage(Role.Assistant, "How can I help you?", from: "assistant"), - new ImageMessage(Role.User, "https://example.com/image.png", "user"), - new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.User, "Hello", "user"), - new ImageMessage(Role.User, "https://example.com/image.png", "user"), - ], "user"), - new ToolCallMessage("test", "test", "assistant"), - new ToolCallResultMessage("result", "test", "test", "user"), - new ToolCallResultMessage( - [ - new ToolCall("result", "test", "test"), - new ToolCall("result", "test", "test"), - ], "user"), - new ToolCallMessage( - [ - new ToolCall("test", "test"), - new ToolCall("test", "test"), - ], "assistant"), - new AggregateMessage( - message1: new ToolCallMessage("test", "test", "assistant"), - message2: new ToolCallResultMessage("result", "test", "test", "assistant"), "assistant"), - ]; - var openaiMessageConnectorMiddleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant"); - - var oaiMessages = messages.Select(m => (m, openaiMessageConnectorMiddleware.ProcessIncomingMessages(agent, [m]))); - VerifyOAIMessages(oaiMessages); - } - - [Fact] - public async Task ItProcessUserTextMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("Hello"); - chatRequestMessage.Name.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new TextMessage(Role.User, "Hello", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItShortcutChatRequestMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("hello"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var userMessage = new ChatRequestUserMessage("hello"); - var chatRequestMessage = MessageEnvelope.Create(userMessage); - await agent.GenerateReplyAsync([chatRequestMessage]); - } - - [Fact] - public async Task ItShortcutMessageWhenStrictModelIsFalseAsync() - { - var 
middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - - var chatRequestMessage = ((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Should().Be("hello"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var userMessage = "hello"; - var chatRequestMessage = MessageEnvelope.Create(userMessage); - await agent.GenerateReplyAsync([chatRequestMessage]); - } - - [Fact] - public async Task ItThrowExceptionWhenStrictModeIsTrueAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // user message - var userMessage = "hello"; - var chatRequestMessage = MessageEnvelope.Create(userMessage); - Func action = async () => await agent.GenerateReplyAsync([chatRequestMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: MessageEnvelope`1"); - } - - [Fact] - public async Task ItProcessAssistantTextMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("How can I help you?"); - chatRequestMessage.Name.Should().Be("assistant"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // assistant message - IMessage message = new TextMessage(Role.Assistant, "How can I help you?", "assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessSystemTextMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestSystemMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("You are a helpful AI assistant"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // system message - IMessage message = new TextMessage(Role.System, "You are a helpful AI assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessImageMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.Name.Should().Be("user"); - chatRequestMessage.MultimodalContentItems.Count().Should().Be(1); - chatRequestMessage.MultimodalContentItems.First().Should().BeOfType(); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ImageMessage(Role.User, "https://example.com/image.png", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task 
ItThrowExceptionWhenProcessingImageMessageFromSelfAndStrictModeIsTrueAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var imageMessage = new ImageMessage(Role.Assistant, "https://example.com/image.png", "assistant"); - Func action = async () => await agent.GenerateReplyAsync([imageMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: ImageMessage"); - } - - [Fact] - public async Task ItProcessMultiModalMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.Name.Should().Be("user"); - chatRequestMessage.MultimodalContentItems.Count().Should().Be(2); - chatRequestMessage.MultimodalContentItems.First().Should().BeOfType(); - chatRequestMessage.MultimodalContentItems.Last().Should().BeOfType(); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new MultiModalMessage( - Role.User, - [ - new TextMessage(Role.User, "Hello", "user"), - new ImageMessage(Role.User, "https://example.com/image.png", "user"), - ], "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingMultiModalMessageFromSelfAndStrictModeIsTrueAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var multiModalMessage = new MultiModalMessage( - Role.Assistant, - [ - new TextMessage(Role.User, "Hello", "assistant"), - new ImageMessage(Role.User, "https://example.com/image.png", "assistant"), - ], "assistant"); - - Func action = async () => await agent.GenerateReplyAsync([multiModalMessage]); - - await action.Should().ThrowAsync().WithMessage("Invalid message type: MultiModalMessage"); - } - - [Fact] - public async Task ItProcessToolCallMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Name.Should().Be("assistant"); - chatRequestMessage.ToolCalls.Count().Should().Be(1); - chatRequestMessage.Content.Should().Be("textContent"); - chatRequestMessage.ToolCalls.First().Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)chatRequestMessage.ToolCalls.First(); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be("test"); - functionToolCall.Arguments.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ToolCallMessage("test", "test", "assistant") - { - Content = "textContent", - }; - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessParallelToolCallMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, 
_, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().BeNullOrEmpty(); - chatRequestMessage.Name.Should().Be("assistant"); - chatRequestMessage.ToolCalls.Count().Should().Be(2); - for (int i = 0; i < chatRequestMessage.ToolCalls.Count(); i++) - { - chatRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)chatRequestMessage.ToolCalls.ElementAt(i); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.Arguments.Should().Be("test"); - } - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test"), - new ToolCall("test", "test"), - }; - IMessage message = new ToolCallMessage(toolCalls, "assistant"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItThrowExceptionWhenProcessingToolCallMessageFromUserAndStrictModeIsTrueAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(strictMode: true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - var toolCallMessage = new ToolCallMessage("test", "test", "user"); - Func action = async () => await agent.GenerateReplyAsync([toolCallMessage]); - await action.Should().ThrowAsync().WithMessage("Invalid message type: ToolCallMessage"); - } - - [Fact] - public async Task ItProcessToolCallResultMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - IMessage message = new ToolCallResultMessage("result", "test", "test", "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessParallelToolCallResultMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(2); - - for (int i = 0; i < msgs.Count(); i++) - { - var innerMessage = msgs.ElementAt(i); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be($"test_{i}"); - } - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test", "result"), - new ToolCall("test", "test", "result"), - }; - IMessage message = new ToolCallResultMessage(toolCalls, "user"); - await agent.GenerateReplyAsync([message]); - } - - [Fact] - public async Task ItProcessFunctionCallMiddlewareMessageFromUserAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(1); - 
var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestUserMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.Name.Should().Be("user"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCallMessage = new ToolCallMessage("test", "test", "user"); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "user"); - var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "user"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItProcessFunctionCallMiddlewareMessageFromAssistantAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(2); - var innerMessage = msgs.Last(); - innerMessage!.Should().BeOfType>(); - var chatRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)innerMessage!).Content; - chatRequestMessage.Content.Should().Be("result"); - chatRequestMessage.ToolCallId.Should().Be("test"); - - var toolCallMessage = msgs.First(); - toolCallMessage!.Should().BeOfType>(); - var toolCallRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)toolCallMessage!).Content; - toolCallRequestMessage.Content.Should().BeNullOrEmpty(); - toolCallRequestMessage.ToolCalls.Count().Should().Be(1); - toolCallRequestMessage.ToolCalls.First().Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)toolCallRequestMessage.ToolCalls.First(); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be("test"); - functionToolCall.Arguments.Should().Be("test"); - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCallMessage = new ToolCallMessage("test", "test", "assistant"); - var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "assistant"); - var aggregateMessage = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItProcessParallelFunctionCallMiddlewareMessageFromAssistantAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(async (msgs, _, innerAgent, _) => - { - msgs.Count().Should().Be(3); - var toolCallMessage = msgs.First(); - toolCallMessage!.Should().BeOfType>(); - var toolCallRequestMessage = (ChatRequestAssistantMessage)((MessageEnvelope)toolCallMessage!).Content; - toolCallRequestMessage.Content.Should().BeNullOrEmpty(); - toolCallRequestMessage.ToolCalls.Count().Should().Be(2); - - for (int i = 0; i < toolCallRequestMessage.ToolCalls.Count(); i++) - { - toolCallRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); - var functionToolCall = (ChatCompletionsFunctionToolCall)toolCallRequestMessage.ToolCalls.ElementAt(i); - functionToolCall.Name.Should().Be("test"); - functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.Arguments.Should().Be("test"); - } - - for (int i = 1; i < msgs.Count(); i++) - { - var toolCallResultMessage = msgs.ElementAt(i); - toolCallResultMessage!.Should().BeOfType>(); - var toolCallResultRequestMessage = (ChatRequestToolMessage)((MessageEnvelope)toolCallResultMessage!).Content; - 
toolCallResultRequestMessage.Content.Should().Be("result"); - toolCallResultRequestMessage.ToolCallId.Should().Be($"test_{i - 1}"); - } - - return await innerAgent.GenerateReplyAsync(msgs); - }) - .RegisterMiddleware(middleware); - - // user message - var toolCalls = new[] - { - new ToolCall("test", "test", "result"), - new ToolCall("test", "test", "result"), - }; - var toolCallMessage = new ToolCallMessage(toolCalls, "assistant"); - var toolCallResultMessage = new ToolCallResultMessage(toolCalls, "assistant"); - var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); - await agent.GenerateReplyAsync([aggregateMessage]); - } - - [Fact] - public async Task ItConvertChatResponseMessageToTextMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = CreateInstance(ChatRole.Assistant, "hello"); - var chatRequestMessage = MessageEnvelope.Create(textMessage); - - var message = await agent.GenerateReplyAsync([chatRequestMessage]); - message.Should().BeOfType(); - message.GetContent().Should().Be("hello"); - message.GetRole().Should().Be(Role.Assistant); - } - - [Fact] - public async Task ItConvertChatResponseMessageToToolCallMessageAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // tool call message - var toolCallMessage = CreateInstance(ChatRole.Assistant, "textContent", new[] { new ChatCompletionsFunctionToolCall("test", "test", "test") }, new FunctionCall("test", "test"), CreateInstance(), new Dictionary()); - var chatRequestMessage = MessageEnvelope.Create(toolCallMessage); - var message = await agent.GenerateReplyAsync([chatRequestMessage]); - message.Should().BeOfType(); - message.GetToolCalls()!.Count().Should().Be(1); - message.GetToolCalls()!.First().FunctionName.Should().Be("test"); - message.GetToolCalls()!.First().FunctionArguments.Should().Be("test"); - message.GetContent().Should().Be("textContent"); - } - - [Fact] - public async Task ItReturnOriginalMessageWhenStrictModeIsFalseAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = "hello"; - var messageToSend = MessageEnvelope.Create(textMessage); - - var message = await agent.GenerateReplyAsync([messageToSend]); - message.Should().BeOfType>(); - } - - [Fact] - public async Task ItThrowInvalidOperationExceptionWhenStrictModeIsTrueAsync() - { - var middleware = new OpenAIChatRequestMessageConnector(true); - var agent = new EchoAgent("assistant") - .RegisterMiddleware(middleware); - - // text message - var textMessage = new ChatRequestUserMessage("hello"); - var messageToSend = MessageEnvelope.Create(textMessage); - Func action = async () => await agent.GenerateReplyAsync([messageToSend]); - - await action.Should().ThrowAsync().WithMessage("Invalid return message type MessageEnvelope`1"); - } - - [Fact] - public void ToOpenAIChatRequestMessageShortCircuitTest() - { - var agent = new EchoAgent("assistant"); - var middleware = new OpenAIChatRequestMessageConnector(); - ChatRequestMessage[] messages = - [ - new ChatRequestUserMessage("Hello"), - new ChatRequestAssistantMessage("How can I help you?"), - new ChatRequestSystemMessage("You are a helpful AI assistant"), - new ChatRequestFunctionMessage("result", "functionName"), - new 
ChatRequestToolMessage("test", "test"), - ]; - - foreach (var oaiMessage in messages) - { - IMessage message = new MessageEnvelope(oaiMessage); - var oaiMessages = middleware.ProcessIncomingMessages(agent, [message]); - oaiMessages.Count().Should().Be(1); - //oaiMessages.First().Should().BeOfType>(); - if (oaiMessages.First() is IMessage chatRequestMessage) - { - chatRequestMessage.Content.Should().Be(oaiMessage); - } - else - { - // fail the test - Assert.True(false); - } - } - } - private void VerifyOAIMessages(IEnumerable<(IMessage, IEnumerable)> messages) - { - var jsonObjects = messages.Select(pair => - { - var (originalMessage, ms) = pair; - var objs = new List(); - foreach (var m in ms) - { - object? obj = null; - var chatRequestMessage = (m as IMessage)?.Content; - if (chatRequestMessage is ChatRequestUserMessage userMessage) - { - obj = new - { - Role = userMessage.Role.ToString(), - Content = userMessage.Content, - Name = userMessage.Name, - MultiModaItem = userMessage.MultimodalContentItems?.Select(item => - { - return item switch - { - ChatMessageImageContentItem imageContentItem => new - { - Type = "Image", - ImageUrl = GetImageUrlFromContent(imageContentItem), - } as object, - ChatMessageTextContentItem textContentItem => new - { - Type = "Text", - Text = textContentItem.Text, - } as object, - _ => throw new System.NotImplementedException(), - }; - }), - }; - } - - if (chatRequestMessage is ChatRequestAssistantMessage assistantMessage) - { - obj = new - { - Role = assistantMessage.Role.ToString(), - Content = assistantMessage.Content, - Name = assistantMessage.Name, - TooCall = assistantMessage.ToolCalls.Select(tc => - { - return tc switch - { - ChatCompletionsFunctionToolCall functionToolCall => new - { - Type = "Function", - Name = functionToolCall.Name, - Arguments = functionToolCall.Arguments, - Id = functionToolCall.Id, - } as object, - _ => throw new System.NotImplementedException(), - }; - }), - FunctionCallName = assistantMessage.FunctionCall?.Name, - FunctionCallArguments = assistantMessage.FunctionCall?.Arguments, - }; - } - - if (chatRequestMessage is ChatRequestSystemMessage systemMessage) - { - obj = new - { - Name = systemMessage.Name, - Role = systemMessage.Role.ToString(), - Content = systemMessage.Content, - }; - } - - if (chatRequestMessage is ChatRequestFunctionMessage functionMessage) - { - obj = new - { - Role = functionMessage.Role.ToString(), - Content = functionMessage.Content, - Name = functionMessage.Name, - }; - } - - if (chatRequestMessage is ChatRequestToolMessage toolCallMessage) - { - obj = new - { - Role = toolCallMessage.Role.ToString(), - Content = toolCallMessage.Content, - ToolCallId = toolCallMessage.ToolCallId, - }; - } - - objs.Add(obj ?? throw new System.NotImplementedException()); - } - - return new - { - OriginalMessage = originalMessage.ToString(), - ConvertedMessages = objs, - }; - }); - - var json = JsonSerializer.Serialize(jsonObjects, this.jsonSerializerOptions); - Approvals.Verify(json); - } - - private object? 
GetImageUrlFromContent(ChatMessageImageContentItem content) - { - return content.GetType().GetProperty("ImageUrl", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)?.GetValue(content); - } - - private static T CreateInstance(params object[] args) - { - var type = typeof(T); - var instance = type.Assembly.CreateInstance( - type.FullName!, false, - BindingFlags.Instance | BindingFlags.NonPublic, - null, args, null, null); - return (T)instance!; - } -} diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromMethod.approved.txt b/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromMethod.approved.txt deleted file mode 100644 index eb346da3b3..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromMethod.approved.txt +++ /dev/null @@ -1,23 +0,0 @@ -ο»Ώ[ - { - "Name": "_ItCreateFunctionContractsFromMethod_b__2_0", - "Description": "", - "Parameters": [], - "ReturnType": "System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "ReturnDescription": "" - }, - { - "Name": "_ItCreateFunctionContractsFromMethod_b__2_1", - "Description": "", - "Parameters": [ - { - "Name": "message", - "Description": "", - "ParameterType": "System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "IsRequired": true - } - ], - "ReturnType": "System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "ReturnDescription": "" - } -] \ No newline at end of file diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromPrompt.approved.txt b/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromPrompt.approved.txt deleted file mode 100644 index 428f53572f..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromPrompt.approved.txt +++ /dev/null @@ -1,8 +0,0 @@ -ο»Ώ[ - { - "Name": "sayHello", - "Description": "Generic function, unknown purpose", - "Parameters": [], - "ReturnDescription": "" - } -] \ No newline at end of file diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromTestPlugin.approved.txt b/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromTestPlugin.approved.txt deleted file mode 100644 index 9ed3c675e4..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/ApprovalTests/KernelFunctionExtensionTests.ItCreateFunctionContractsFromTestPlugin.approved.txt +++ /dev/null @@ -1,25 +0,0 @@ -ο»Ώ[ - { - "ClassName": "test_plugin", - "Name": "GetState", - "Description": "Gets the state of the light.", - "Parameters": [], - "ReturnType": "System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "ReturnDescription": "" - }, - { - "ClassName": "test_plugin", - "Name": "ChangeState", - "Description": "Changes the state of the light.'", - "Parameters": [ - { - "Name": "newState", - "Description": "new state", - "ParameterType": "System.Boolean, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "IsRequired": true - 
} - ], - "ReturnType": "System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", - "ReturnDescription": "" - } -] \ No newline at end of file diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj b/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj deleted file mode 100644 index 7f42b67da7..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - false - $(NoWarn);SKEXP0110 - True - True - - - - - - - - - - diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionExtensionTests.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionExtensionTests.cs deleted file mode 100644 index 48bdff8576..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionExtensionTests.cs +++ /dev/null @@ -1,110 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// KernelFunctionExtensionTests.cs - -using System.ComponentModel; -using ApprovalTests; -using ApprovalTests.Namers; -using ApprovalTests.Reporters; -using AutoGen.SemanticKernel.Extension; -using FluentAssertions; -using Microsoft.SemanticKernel; -using Newtonsoft.Json; -using Xunit; - -namespace AutoGen.SemanticKernel.Tests; - -public class TestPlugin -{ - public bool IsOn { get; set; } = false; - - [KernelFunction] - [Description("Gets the state of the light.")] - public string GetState() => this.IsOn ? 
"on" : "off"; - - [KernelFunction] - [Description("Changes the state of the light.'")] - public string ChangeState( - [Description("new state")] bool newState) - { - this.IsOn = newState; - var state = this.GetState(); - - // Print the state to the console - Console.ForegroundColor = ConsoleColor.DarkBlue; - Console.WriteLine($"[Light is now {state}]"); - Console.ResetColor(); - - return $"The status of the light is now {state}"; - } -} -public class KernelFunctionExtensionTests -{ - private readonly JsonSerializerSettings _serializerSettings = new JsonSerializerSettings - { - Formatting = Formatting.Indented, - NullValueHandling = NullValueHandling.Ignore, - StringEscapeHandling = StringEscapeHandling.Default, - }; - - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void ItCreateFunctionContractsFromTestPlugin() - { - var kernel = new Kernel(); - var plugin = kernel.ImportPluginFromType("test_plugin"); - - var functionContracts = plugin.Select(f => f.Metadata.ToFunctionContract()).ToList(); - - functionContracts.Count.Should().Be(2); - var json = JsonConvert.SerializeObject(functionContracts, _serializerSettings); - - Approvals.Verify(json); - } - - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void ItCreateFunctionContractsFromMethod() - { - var kernel = new Kernel(); - var sayHelloFunction = KernelFunctionFactory.CreateFromMethod(() => "Hello, World!"); - var echoFunction = KernelFunctionFactory.CreateFromMethod((string message) => message); - - var functionContracts = new[] - { - sayHelloFunction.Metadata.ToFunctionContract(), - echoFunction.Metadata.ToFunctionContract(), - }; - - var json = JsonConvert.SerializeObject(functionContracts, _serializerSettings); - - functionContracts.Length.Should().Be(2); - Approvals.Verify(json); - } - - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void ItCreateFunctionContractsFromPrompt() - { - var kernel = new Kernel(); - var sayHelloFunction = KernelFunctionFactory.CreateFromPrompt("Say {{hello}}, World!", functionName: "sayHello"); - - var functionContracts = new[] - { - sayHelloFunction.Metadata.ToFunctionContract(), - }; - - var json = JsonConvert.SerializeObject(functionContracts, _serializerSettings); - - functionContracts.Length.Should().Be(1); - Approvals.Verify(json); - } -} diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs deleted file mode 100644 index 380511d42a..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs +++ /dev/null @@ -1,129 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// KernelFunctionMiddlewareTests.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using AutoGen.Tests; -using Azure.AI.OpenAI; -using FluentAssertions; -using Microsoft.SemanticKernel; - -namespace AutoGen.SemanticKernel.Tests; - -public class KernelFunctionMiddlewareTests -{ - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task ItRegisterKernelFunctionMiddlewareFromTestPluginTests() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); - - var kernel = new Kernel(); - var plugin = kernel.ImportPluginFromType(); - var kernelFunctionMiddleware = new KernelPluginMiddleware(kernel, plugin); - - var agent = new OpenAIChatAgent(openaiClient, "assistant", modelName: deployName) - .RegisterMessageConnector() - .RegisterMiddleware(kernelFunctionMiddleware); - - var reply = await agent.SendAsync("what's the status of the light?"); - reply.GetContent().Should().Be("off"); - reply.Should().BeOfType(); - if (reply is ToolCallAggregateMessage aggregateMessage) - { - var toolCallMessage = aggregateMessage.Message1; - toolCallMessage.ToolCalls.Should().HaveCount(1); - toolCallMessage.ToolCalls[0].FunctionName.Should().Be("GetState"); - - var toolCallResultMessage = aggregateMessage.Message2; - toolCallResultMessage.ToolCalls.Should().HaveCount(1); - toolCallResultMessage.ToolCalls[0].Result.Should().Be("off"); - } - - reply = await agent.SendAsync("change the status of the light to on"); - reply.GetContent().Should().Be("The status of the light is now on"); - reply.Should().BeOfType(); - if (reply is ToolCallAggregateMessage aggregateMessage1) - { - var toolCallMessage = aggregateMessage1.Message1; - toolCallMessage.ToolCalls.Should().HaveCount(1); - toolCallMessage.ToolCalls[0].FunctionName.Should().Be("ChangeState"); - - var toolCallResultMessage = aggregateMessage1.Message2; - toolCallResultMessage.ToolCalls.Should().HaveCount(1); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task ItRegisterKernelFunctionMiddlewareFromMethodTests() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); - - var kernel = new Kernel(); - var getWeatherMethod = kernel.CreateFunctionFromMethod((string location) => $"The weather in {location} is sunny.", functionName: "GetWeather", description: "Get the weather for a location."); - var createPersonObjectMethod = kernel.CreateFunctionFromMethod((string name, string email, int age) => new Person(name, email, age), functionName: "CreatePersonObject", description: "Creates a person object."); - var plugin = kernel.ImportPluginFromFunctions("plugin", [getWeatherMethod, createPersonObjectMethod]); - var kernelFunctionMiddleware = new KernelPluginMiddleware(kernel, plugin); - - var agent = new OpenAIChatAgent(openaiClient, "assistant", modelName: deployName) - .RegisterMessageConnector() - .RegisterMiddleware(kernelFunctionMiddleware); - - var reply = await agent.SendAsync("what's the weather in Seattle?"); - reply.GetContent().Should().Be("The weather in Seattle is sunny."); - reply.Should().BeOfType(); - if (reply is ToolCallAggregateMessage getWeatherMessage) - { - var toolCallMessage = getWeatherMessage.Message1; - toolCallMessage.ToolCalls.Should().HaveCount(1); - toolCallMessage.ToolCalls[0].FunctionName.Should().Be("GetWeather"); - - var toolCallResultMessage = getWeatherMessage.Message2; - toolCallResultMessage.ToolCalls.Should().HaveCount(1); - } - - reply = await agent.SendAsync("Create a person object with name: John, email: 12345@gmail.com, age: 30"); - reply.GetContent().Should().Be("Name: John, Email: 12345@gmail.com, Age: 30"); - reply.Should().BeOfType(); - if (reply is ToolCallAggregateMessage createPersonObjectMessage) - { - var toolCallMessage = createPersonObjectMessage.Message1; - toolCallMessage.ToolCalls.Should().HaveCount(1); - toolCallMessage.ToolCalls[0].FunctionName.Should().Be("CreatePersonObject"); - - var toolCallResultMessage = createPersonObjectMessage.Message2; - toolCallResultMessage.ToolCalls.Should().HaveCount(1); - } - } -} - -public class Person -{ - public Person(string name, string email, int age) - { - this.Name = name; - this.Email = email; - this.Age = age; - } - - public string Name { get; set; } - public string Email { get; set; } - public int Age { get; set; } - - public override string ToString() - { - return $"Name: {this.Name}, Email: {this.Email}, Age: {this.Age}"; - } -} diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs deleted file mode 100644 index fc6fa83cc5..0000000000 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs +++ /dev/null @@ -1,248 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// SemanticKernelAgentTest.cs - -using AutoGen.Core; -using AutoGen.SemanticKernel.Extension; -using AutoGen.Tests; -using FluentAssertions; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace AutoGen.SemanticKernel.Tests; - -public partial class SemanticKernelAgentTest -{ - /// - /// Get the weather for a location. - /// - /// location - /// - [Function] - public async Task GetWeatherAsync(string location) - { - return $"The weather in {location} is sunny."; - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task BasicConversationTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var kernel = builder.Build(); - - var skAgent = new SemanticKernelAgent(kernel, "assistant"); - - var chatMessageContent = MessageEnvelope.Create(new ChatMessageContent(AuthorRole.Assistant, "Hello")); - var reply = await skAgent.SendAsync(chatMessageContent); - - reply.Should().BeOfType>(); - reply.As>().From.Should().Be("assistant"); - - // test streaming - var streamingReply = skAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); - - await foreach (var streamingMessage in streamingReply) - { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().From.Should().Be("assistant"); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task SemanticKernelChatMessageContentConnectorTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var kernel = builder.Build(); - - var skAgent = new SemanticKernelAgent(kernel, "assistant") - .RegisterMessageConnector(); - - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatMessageContent(AuthorRole.Assistant, "Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await skAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - } - - // test streaming - foreach (var message in messages) - { - var reply = skAgent.GenerateStreamingReplyAsync([message]); - - await foreach (var streamingMessage in reply) - { - streamingMessage.Should().BeOfType(); - streamingMessage.As().From.Should().Be("assistant"); - } - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task SemanticKernelPluginTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var parameters = this.GetWeatherAsyncFunctionContract.Parameters!.Select(p => new KernelParameterMetadata(p.Name!) - { - Description = p.Description, - DefaultValue = p.DefaultValue, - IsRequired = p.IsRequired, - ParameterType = p.ParameterType, - }); - var function = KernelFunctionFactory.CreateFromMethod(this.GetWeatherAsync, this.GetWeatherAsyncFunctionContract.Name, this.GetWeatherAsyncFunctionContract.Description, parameters); - builder.Plugins.AddFromFunctions("plugins", [function]); - var kernel = builder.Build(); - - var skAgent = new SemanticKernelAgent(kernel, "assistant") - .RegisterMessageConnector(); - - skAgent.StreamingMiddlewares.Count().Should().Be(1); - - var question = "What is the weather in Seattle?"; - var reply = await skAgent.SendAsync(question); - - reply.GetContent()!.ToLower().Should().Contain("seattle"); - reply.GetContent()!.ToLower().Should().Contain("sunny"); - } - - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task BasicSkChatCompletionAgentConversationTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var kernel = builder.Build(); - var agent = new ChatCompletionAgent() - { - Kernel = kernel, - Name = "assistant", - Instructions = "You are a helpful AI assistant" - }; - - var skAgent = new SemanticKernelChatCompletionAgent(agent); - - var chatMessageContent = MessageEnvelope.Create(new ChatMessageContent(AuthorRole.Assistant, "Hello")); - var reply = await skAgent.SendAsync(chatMessageContent); - - reply.Should().BeOfType>(); - reply.As>().From.Should().Be("assistant"); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task SkChatCompletionAgentChatMessageContentConnectorTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var kernel = builder.Build(); - - var connector = new SemanticKernelChatMessageContentConnector(); - var agent = new ChatCompletionAgent() - { - Kernel = kernel, - Name = "assistant", - Instructions = "You are a helpful AI assistant" - }; - var skAgent = new SemanticKernelChatCompletionAgent(agent) - .RegisterMiddleware(connector); - - var messages = new IMessage[] - { - MessageEnvelope.Create(new ChatMessageContent(AuthorRole.Assistant, "Hello")), - new TextMessage(Role.Assistant, "Hello", from: "user"), new MultiModalMessage(Role.Assistant, - [ - new TextMessage(Role.Assistant, "Hello", from: "user"), - ], - from: "user"), - }; - - foreach (var message in messages) - { - var reply = await skAgent.SendAsync(message); - - reply.Should().BeOfType(); - reply.As().From.Should().Be("assistant"); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task SkChatCompletionAgentPluginTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var builder = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); - - var parameters = this.GetWeatherAsyncFunctionContract.Parameters!.Select(p => new KernelParameterMetadata(p.Name!) 
- { - Description = p.Description, - DefaultValue = p.DefaultValue, - IsRequired = p.IsRequired, - ParameterType = p.ParameterType, - }); - var function = KernelFunctionFactory.CreateFromMethod(this.GetWeatherAsync, this.GetWeatherAsyncFunctionContract.Name, this.GetWeatherAsyncFunctionContract.Description, parameters); - builder.Plugins.AddFromFunctions("plugins", [function]); - var kernel = builder.Build(); - - var agent = new ChatCompletionAgent() - { - Kernel = kernel, - Name = "assistant", - Instructions = "You are a helpful AI assistant", - ExecutionSettings = - new OpenAIPromptExecutionSettings() - { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions - } - }; - var skAgent = - new SemanticKernelChatCompletionAgent(agent).RegisterMiddleware( - new SemanticKernelChatMessageContentConnector()); - - var question = "What is the weather in Seattle?"; - var reply = await skAgent.SendAsync(question); - - reply.GetContent()!.ToLower().Should().Contain("seattle"); - reply.GetContent()!.ToLower().Should().Contain("sunny"); - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionCallTemplateTests.TestFunctionCallTemplate.approved.txt b/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionCallTemplateTests.TestFunctionCallTemplate.approved.txt deleted file mode 100644 index ea5a8585cc..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionCallTemplateTests.TestFunctionCallTemplate.approved.txt +++ /dev/null @@ -1,65 +0,0 @@ -ο»Ώο»Ώ//---------------------- -// -// This code was generated by a tool. -// -//---------------------- -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading.Tasks; -using System; -using AutoGen.Core; - -namespace AutoGen.SourceGenerator.Tests -{ - public partial class FunctionExamples - { - - private class AddAsyncSchema - { - [JsonPropertyName(@"a")] - public System.Int32 a {get; set;} - [JsonPropertyName(@"b")] - public System.Int32 b {get; set;} - } - - public System.Threading.Tasks.Task`1[System.String] AddAsyncWrapper(string arguments) - { - var schema = JsonSerializer.Deserialize( - arguments, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }); - - return AddAsync(schema.a, schema.b); - } - - public FunctionContract AddAsyncFunctionContract - { - get => new FunctionContract - { - Name = @"AddAsync", - Description = @"Add two numbers.", - ReturnType = typeof(System.Threading.Tasks.Task`1[System.String]), - Parameters = new global::AutoGen.Core.FunctionParameterContract[] - { - new FunctionParameterContract - { - Name = @"a", - Description = @"The first number.", - ParameterType = typeof(System.Int32), - IsRequired = true, - }, - new FunctionParameterContract - { - Name = @"b", - Description = @"The second number.", - ParameterType = typeof(System.Int32), - IsRequired = true, - }, - }, - }; - } - } -} - diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Add_Test.approved.txt b/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Add_Test.approved.txt deleted file mode 100644 index 9075d35b95..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Add_Test.approved.txt +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "Add", - "description": "Add function", - "parameters": { - "type": "object", - "properties": { - "a": { - "type": "integer", - "description": "a" - }, - "b": { - "type": "integer", - "description": "b" - } - }, - "required": [ 
- "a", - "b" - ] - } -} \ No newline at end of file diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.DictionaryToString_Test.approved.txt b/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.DictionaryToString_Test.approved.txt deleted file mode 100644 index 8b6aad2fcd..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.DictionaryToString_Test.approved.txt +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "DictionaryToStringAsync", - "description": "DictionaryToString function", - "parameters": { - "type": "object", - "properties": { - "xargs": { - "type": "object", - "additionalProperties": { - "type": "string" - }, - "description": "an object of key-value pairs. key is string, value is string" - } - }, - "required": [ - "xargs" - ] - } -} \ No newline at end of file diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Query_Test.approved.txt b/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Query_Test.approved.txt deleted file mode 100644 index 6d16b5a91c..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Query_Test.approved.txt +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "Query", - "description": "query function", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "query, required" - }, - "k": { - "type": "integer", - "description": "top k, optional, default value is 3" - }, - "thresold": { - "type": "number", - "description": "thresold, optional, default value is 0.5" - } - }, - "required": [ - "query" - ] - } -} \ No newline at end of file diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Sum_Test.approved.txt b/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Sum_Test.approved.txt deleted file mode 100644 index ce86faf6a6..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/ApprovalTests/FunctionExample.Sum_Test.approved.txt +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "Sum", - "description": "Sum function", - "parameters": { - "type": "object", - "properties": { - "args": { - "type": "array", - "items": { - "type": "number" - }, - "description": "an array of double values" - } - }, - "required": [ - "args" - ] - } -} \ No newline at end of file diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/AutoGen.SourceGenerator.Tests.csproj b/dotnet/test/AutoGen.SourceGenerator.Tests/AutoGen.SourceGenerator.Tests.csproj deleted file mode 100644 index f7d814a6cd..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/AutoGen.SourceGenerator.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - false - True - True - - - - - - - - \ No newline at end of file diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FilescopeNamespaceFunctionExample.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FilescopeNamespaceFunctionExample.cs deleted file mode 100644 index 27978c2712..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/FilescopeNamespaceFunctionExample.cs +++ /dev/null @@ -1,20 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. 
-// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FilescopeNamespaceFunctionExample.cs - -using AutoGen.Core; - -namespace AutoGen.SourceGenerator.Tests; -public partial class FilescopeNamespaceFunctionExample -{ - [Function] - public Task Add(int a, int b) - { - return Task.FromResult($"{a + b}"); - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateEncodingTests.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateEncodingTests.cs deleted file mode 100644 index 9b4665f835..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateEncodingTests.cs +++ /dev/null @@ -1,100 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionCallTemplateEncodingTests.cs - -using System.Text.Json; // Needed for JsonSerializer -using AutoGen.SourceGenerator.Template; // Needed for FunctionCallTemplate -using Xunit; // Needed for Fact and Assert - -namespace AutoGen.SourceGenerator.Tests -{ - public class FunctionCallTemplateEncodingTests - { - private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions - { - WriteIndented = true, - }; - - [Fact] - public void FunctionDescription_Should_Encode_DoubleQuotes() - { - // Arrange - var functionContracts = new List - { - new SourceGeneratorFunctionContract - { - Name = "TestFunction", - Description = "This is a \"test\" function", - Parameters = new SourceGeneratorParameterContract[] - { - new SourceGeneratorParameterContract - { - Name = "param1", - Description = "This is a \"parameter\" description", - Type = "string", - IsOptional = false - } - }, - ReturnType = "void" - } - }; - - var template = new FunctionCallTemplate - { - NameSpace = "TestNamespace", - ClassName = "TestClass", - FunctionContracts = functionContracts - }; - - // Act - var result = template.TransformText(); - - // Assert - Assert.Contains("Description = @\"This is a \"\"test\"\" function\"", result); - Assert.Contains("Description = @\"This is a \"\"parameter\"\" description\"", result); - } - - [Fact] - public void ParameterDescription_Should_Encode_DoubleQuotes() - { - // Arrange - var functionContracts = new List - { - new SourceGeneratorFunctionContract - { - Name = "TestFunction", - Description = "This is a test function", - Parameters = new SourceGeneratorParameterContract[] - { - new SourceGeneratorParameterContract - { - Name = "param1", - Description = "This is a \"parameter\" description", - Type = "string", - IsOptional = false - } - }, - ReturnType = "void" - } - }; - - var template = new FunctionCallTemplate - { - NameSpace = "TestNamespace", - ClassName = "TestClass", - FunctionContracts = functionContracts - }; - - // Act - var result = template.TransformText(); - - // Assert - Assert.Contains("Description = @\"This is a \"\"parameter\"\" description\"", result); - } - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateTests.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateTests.cs deleted file mode 100644 index 3db469a7f1..0000000000 --- 
a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionCallTemplateTests.cs +++ /dev/null @@ -1,52 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionCallTemplateTests.cs - -using ApprovalTests; -using ApprovalTests.Namers; -using ApprovalTests.Reporters; -using AutoGen.SourceGenerator.Template; -using Xunit; - -namespace AutoGen.SourceGenerator.Tests; - -public class FunctionCallTemplateTests -{ - [Fact] - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - public void TestFunctionCallTemplate() - { - var functionExample = new FunctionExamples(); - var function = functionExample.AddAsyncFunctionContract; - var functionCallTemplate = new FunctionCallTemplate() - { - ClassName = function.ClassName, - NameSpace = function.Namespace, - FunctionContracts = [new SourceGeneratorFunctionContract() - { - Name = function.Name, - Description = function.Description, - ReturnType = function.ReturnType!.ToString(), - ReturnDescription = function.ReturnDescription, - Parameters = function.Parameters!.Select(p => new SourceGeneratorParameterContract() - { - Name = p.Name, - Description = p.Description, - Type = p.ParameterType!.ToString(), - IsOptional = !p.IsRequired, - JsonType = p.ParameterType!.ToString(), - }).ToArray() - }] - }; - - var actual = functionCallTemplate.TransformText(); - - Approvals.Verify(actual); - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs deleted file mode 100644 index 72695b9edc..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs +++ /dev/null @@ -1,137 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// FunctionExample.test.cs - -using System.Text.Json; -using ApprovalTests; -using ApprovalTests.Namers; -using ApprovalTests.Reporters; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using FluentAssertions; -using Xunit; - -namespace AutoGen.SourceGenerator.Tests -{ - public class FunctionExample - { - private readonly FunctionExamples functionExamples = new FunctionExamples(); - private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions - { - WriteIndented = true, - }; - - [Fact] - public void Add_Test() - { - var args = new - { - a = 1, - b = 2, - }; - - this.VerifyFunction(functionExamples.AddWrapper, args, 3); - this.VerifyFunctionDefinition(functionExamples.AddFunctionContract.ToOpenAIFunctionDefinition()); - } - - [Fact] - public void Sum_Test() - { - var args = new - { - args = new double[] { 1, 2, 3 }, - }; - - this.VerifyFunction(functionExamples.SumWrapper, args, 6.0); - this.VerifyFunctionDefinition(functionExamples.SumFunctionContract.ToOpenAIFunctionDefinition()); - } - - [Fact] - public async Task DictionaryToString_Test() - { - var args = new - { - xargs = new Dictionary - { - { "a", "1" }, - { "b", "2" }, - }, - }; - - await this.VerifyAsyncFunction(functionExamples.DictionaryToStringAsyncWrapper, args, JsonSerializer.Serialize(args.xargs, jsonSerializerOptions)); - this.VerifyFunctionDefinition(functionExamples.DictionaryToStringAsyncFunctionContract.ToOpenAIFunctionDefinition()); - } - - [Fact] - public async Task TopLevelFunctionExampleAddTestAsync() - { - var example = new TopLevelStatementFunctionExample(); - var args = new - { - a = 1, - b = 2, - }; - - await this.VerifyAsyncFunction(example.AddWrapper, args, "3"); - } - - [Fact] - public async Task FilescopeFunctionExampleAddTestAsync() - { - var example = new FilescopeNamespaceFunctionExample(); - var args = new - { - a = 1, - b = 2, - }; - - await this.VerifyAsyncFunction(example.AddWrapper, args, "3"); - } - - [Fact] - public void Query_Test() - { - var args = new - { - query = "hello", - k = 3, - }; - - this.VerifyFunction(functionExamples.QueryWrapper, args, new[] { "hello", "hello", "hello" }); - this.VerifyFunctionDefinition(functionExamples.QueryFunctionContract.ToOpenAIFunctionDefinition()); - } - - [UseReporter(typeof(DiffReporter))] - [UseApprovalSubdirectory("ApprovalTests")] - private void VerifyFunctionDefinition(FunctionDefinition function) - { - var func = new - { - name = function.Name, - description = function.Description.Replace(Environment.NewLine, ","), - parameters = function.Parameters.ToObjectFromJson(options: jsonSerializerOptions), - }; - - Approvals.Verify(JsonSerializer.Serialize(func, jsonSerializerOptions)); - } - - private void VerifyFunction(Func func, U args, T expected) - { - var str = JsonSerializer.Serialize(args, jsonSerializerOptions); - var res = func(str); - res.Should().BeEquivalentTo(expected); - } - - private async Task VerifyAsyncFunction(Func> func, U args, T expected) - { - var str = JsonSerializer.Serialize(args, jsonSerializerOptions); - var res = await func(str); - res.Should().BeEquivalentTo(expected); - } - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExamples.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExamples.cs deleted file mode 100644 index 4d208e955f..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExamples.cs +++ /dev/null @@ -1,76 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// 
Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// FunctionExamples.cs - -using System.Text.Json; -using AutoGen.Core; - -namespace AutoGen.SourceGenerator.Tests -{ - public partial class FunctionExamples - { - /// - /// Add function - /// - /// a - /// b - [FunctionAttribute] - public int Add(int a, int b) - { - return a + b; - } - - /// - /// Add two numbers. - /// - /// The first number. - /// The second number. - [Function] - public Task AddAsync(int a, int b) - { - return Task.FromResult($"{a} + {b} = {a + b}"); - } - - /// - /// Sum function - /// - /// an array of double values - [FunctionAttribute] - public double Sum(double[] args) - { - return args.Sum(); - } - - /// - /// DictionaryToString function - /// - /// an object of key-value pairs. key is string, value is string - [FunctionAttribute] - public Task DictionaryToStringAsync(Dictionary xargs) - { - var res = JsonSerializer.Serialize(xargs, new JsonSerializerOptions - { - WriteIndented = true, - }); - - return Task.FromResult(res); - } - - /// - /// query function - /// - /// query, required - /// top k, optional, default value is 3 - /// thresold, optional, default value is 0.5 - [FunctionAttribute] - public string[] Query(string query, int k = 3, float thresold = 0.5f) - { - return Enumerable.Repeat(query, k).ToArray(); - } - } -} diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/TopLevelStatementFunctionExample.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/TopLevelStatementFunctionExample.cs deleted file mode 100644 index 1e306f8829..0000000000 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/TopLevelStatementFunctionExample.cs +++ /dev/null @@ -1,19 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TopLevelStatementFunctionExample.cs - -using AutoGen.Core; - -public partial class TopLevelStatementFunctionExample -{ - [Function] - public Task Add(int a, int b) - { - return Task.FromResult($"{a + b}"); - } -} diff --git a/dotnet/test/AutoGen.Test.Share/Attribute/EnvironmentSpecificFactAttribute.cs b/dotnet/test/AutoGen.Test.Share/Attribute/EnvironmentSpecificFactAttribute.cs deleted file mode 100644 index b3be7547fc..0000000000 --- a/dotnet/test/AutoGen.Test.Share/Attribute/EnvironmentSpecificFactAttribute.cs +++ /dev/null @@ -1,37 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// EnvironmentSpecificFactAttribute.cs - -using Xunit; - -namespace AutoGen.Tests; - -/// -/// A base class for environment-specific fact attributes. 
-/// -[AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)] -public abstract class EnvironmentSpecificFactAttribute : FactAttribute -{ - private readonly string _skipMessage; - - /// - /// Creates a new instance of the class. - /// - /// The message to be used when skipping the test marked with this attribute. - protected EnvironmentSpecificFactAttribute(string skipMessage) - { - _skipMessage = skipMessage ?? throw new ArgumentNullException(nameof(skipMessage)); - } - - public sealed override string Skip => IsEnvironmentSupported() ? string.Empty : _skipMessage; - - /// - /// A method used to evaluate whether to skip a test marked with this attribute. Skips iff this method evaluates to false. - /// - protected abstract bool IsEnvironmentSupported(); -} diff --git a/dotnet/test/AutoGen.Test.Share/Attribute/OpenAIFact.cs b/dotnet/test/AutoGen.Test.Share/Attribute/OpenAIFact.cs deleted file mode 100644 index 7c35163dee..0000000000 --- a/dotnet/test/AutoGen.Test.Share/Attribute/OpenAIFact.cs +++ /dev/null @@ -1,28 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIFact.cs - -namespace AutoGen.Tests; - -/// -/// A fact for tests requiring OPENAI_API_KEY env. -/// -public sealed class ApiKeyFactAttribute : EnvironmentSpecificFactAttribute -{ - private readonly string[] _envVariableNames; - public ApiKeyFactAttribute(params string[] envVariableNames) : base($"{envVariableNames} is not found in env") - { - _envVariableNames = envVariableNames; - } - - /// - protected override bool IsEnvironmentSupported() - { - return _envVariableNames.All(Environment.GetEnvironmentVariables().Contains); - } -} diff --git a/dotnet/test/AutoGen.Test.Share/AutoGen.Tests.Share.csproj b/dotnet/test/AutoGen.Test.Share/AutoGen.Tests.Share.csproj deleted file mode 100644 index 21c71896dd..0000000000 --- a/dotnet/test/AutoGen.Test.Share/AutoGen.Tests.Share.csproj +++ /dev/null @@ -1,15 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - false - True - enable - - - - - - - diff --git a/dotnet/test/AutoGen.Test.Share/EchoAgent.cs b/dotnet/test/AutoGen.Test.Share/EchoAgent.cs deleted file mode 100644 index 78ef11d5de..0000000000 --- a/dotnet/test/AutoGen.Test.Share/EchoAgent.cs +++ /dev/null @@ -1,43 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// EchoAgent.cs - -using System.Runtime.CompilerServices; -using AutoGen.Core; - -namespace AutoGen.Tests; - -public class EchoAgent : IStreamingAgent -{ - public EchoAgent(string name) - { - Name = name; - } - public string Name { get; } - - public Task GenerateReplyAsync( - IEnumerable conversation, - GenerateReplyOptions? 
options = null, - CancellationToken ct = default) - { - // return the most recent message - var lastMessage = conversation.Last(); - lastMessage.From = this.Name; - - return Task.FromResult(lastMessage); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync(IEnumerable messages, GenerateReplyOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var message in messages) - { - message.From = this.Name; - yield return message; - } - } -} diff --git a/dotnet/test/AutoGen.Tests/ApprovalTests/square.png b/dotnet/test/AutoGen.Tests/ApprovalTests/square.png deleted file mode 100644 index afb4f4cd4d..0000000000 --- a/dotnet/test/AutoGen.Tests/ApprovalTests/square.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8323d0b8eceb752e14c29543b2e28bb2fc648ed9719095c31b7708867a4dc918 -size 491 diff --git a/dotnet/test/AutoGen.Tests/AutoGen.Tests.csproj b/dotnet/test/AutoGen.Tests/AutoGen.Tests.csproj deleted file mode 100644 index a0c3b815f2..0000000000 --- a/dotnet/test/AutoGen.Tests/AutoGen.Tests.csproj +++ /dev/null @@ -1,24 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - True - True - $(NoWarn);xUnit1013;SKEXP0110 - - - - - - - - - - - - - PreserveNewest - - - - diff --git a/dotnet/test/AutoGen.Tests/BasicSampleTest.cs b/dotnet/test/AutoGen.Tests/BasicSampleTest.cs deleted file mode 100644 index d5ce15ae3f..0000000000 --- a/dotnet/test/AutoGen.Tests/BasicSampleTest.cs +++ /dev/null @@ -1,86 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// BasicSampleTest.cs - -using System; -using System.IO; -using System.Threading.Tasks; -using AutoGen.BasicSample; -using Xunit.Abstractions; - -namespace AutoGen.Tests -{ - public class BasicSampleTest - { - private readonly ITestOutputHelper _output; - - public BasicSampleTest(ITestOutputHelper output) - { - _output = output; - Console.SetOut(new ConsoleWriter(_output)); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task AssistantAgentTestAsync() - { - await Example01_AssistantAgent.RunAsync(); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task TwoAgentMathClassTestAsync() - { - await Example02_TwoAgent_MathChat.RunAsync(); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task AgentFunctionCallTestAsync() - { - await Example03_Agent_FunctionCall.RunAsync(); - } - - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task MistralClientAgent_TokenCount() - { - await Example14_MistralClientAgent_TokenCount.RunAsync(); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task DynamicGroupChatCalculateFibonacciAsync() - { - await Example07_Dynamic_GroupChat_Calculate_Fibonacci.RunAsync(); - await Example07_Dynamic_GroupChat_Calculate_Fibonacci.RunWorkflowAsync(); - } - - [ApiKeyFact("OPENAI_API_KEY")] - public async Task DalleAndGPT4VTestAsync() - { - await Example05_Dalle_And_GPT4V.RunAsync(); - } - - [ApiKeyFact("OPENAI_API_KEY")] - public async Task GPT4ImageMessage() - { - await Example15_GPT4V_BinaryDataImageMessage.RunAsync(); - } - - public class ConsoleWriter : StringWriter - { - private ITestOutputHelper output; - public ConsoleWriter(ITestOutputHelper output) - { - this.output = output; - } - - public override void WriteLine(string? m) - { - output.WriteLine(m); - } - } - } -} diff --git a/dotnet/test/AutoGen.Tests/GlobalUsing.cs b/dotnet/test/AutoGen.Tests/GlobalUsing.cs deleted file mode 100644 index 3c28defb3c..0000000000 --- a/dotnet/test/AutoGen.Tests/GlobalUsing.cs +++ /dev/null @@ -1,10 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GlobalUsing.cs - -global using AutoGen.Core; diff --git a/dotnet/test/AutoGen.Tests/GroupChat/GraphTests.cs b/dotnet/test/AutoGen.Tests/GroupChat/GraphTests.cs deleted file mode 100644 index 3e63cc64cb..0000000000 --- a/dotnet/test/AutoGen.Tests/GroupChat/GraphTests.cs +++ /dev/null @@ -1,26 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// GraphTests.cs - -using Xunit; - -namespace AutoGen.Tests -{ - public class GraphTests - { - [Fact] - public void GraphTest() - { - var graph1 = new Graph(); - Assert.NotNull(graph1); - - var graph2 = new Graph(null); - Assert.NotNull(graph2); - } - } -} diff --git a/dotnet/test/AutoGen.Tests/GroupChat/GroupChatTests.cs b/dotnet/test/AutoGen.Tests/GroupChat/GroupChatTests.cs deleted file mode 100644 index c811e3974b..0000000000 --- a/dotnet/test/AutoGen.Tests/GroupChat/GroupChatTests.cs +++ /dev/null @@ -1,119 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// GroupChatTests.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using Moq; -using Xunit; - -namespace AutoGen.Tests; - -public class GroupChatTests -{ - [Fact] - public async Task ItSendMessageTestAsync() - { - var alice = new DefaultReplyAgent("Alice", "I am alice"); - var bob = new DefaultReplyAgent("Bob", "I am bob"); - - var groupChat = new GroupChat([alice, bob]); - - var chatHistory = new List(); - - var maxRound = 10; - await foreach (var message in groupChat.SendAsync(chatHistory, maxRound)) - { - chatHistory.Add(message); - } - - chatHistory.Count().Should().Be(10); - } - - [Fact] - public async Task ItTerminateConversationWhenAgentReturnTerminateKeyWord() - { - var alice = new DefaultReplyAgent("Alice", "I am alice"); - var bob = new DefaultReplyAgent("Bob", "I am bob"); - var cathy = new DefaultReplyAgent("Cathy", $"I am cathy, {GroupChatExtension.TERMINATE}"); - - var groupChat = new GroupChat([alice, bob, cathy]); - - var chatHistory = new List(); - - var maxRound = 10; - await foreach (var message in groupChat.SendAsync(chatHistory, maxRound)) - { - chatHistory.Add(message); - } - - chatHistory.Count().Should().Be(3); - chatHistory.Last().From.Should().Be("Cathy"); - } - - [Fact] - public async Task ItSendAsyncDoesntAddDuplicateInitializeMessagesTest() - { - // fix #3268 - var alice = new DefaultReplyAgent("Alice", "I am alice"); - var bob = new DefaultReplyAgent("Bob", "I am bob"); - var cathy = new DefaultReplyAgent("Cathy", $"I am cathy, {GroupChatExtension.TERMINATE}"); - - var roundRobinOrchestrator = new RoundRobinOrchestrator(); - var orchestrator = Mock.Of(); - Mock.Get(orchestrator).Setup(x => x.GetNextSpeakerAsync(It.IsAny(), It.IsAny())) - .Returns((OrchestrationContext context, CancellationToken token) => - { - // determine if initialize message is already sent and not added twice - context.ChatHistory.Where(x => x.From == alice.Name).Count().Should().Be(1); - - return roundRobinOrchestrator.GetNextSpeakerAsync(context, token); - }); - - var groupChat = new GroupChat([alice, bob, cathy], orchestrator); - groupChat.AddInitializeMessage(new TextMessage(Role.User, "Hello", from: alice.Name)); - - var maxRound = 2; - var chatHistory = new List(); - await foreach (var message in groupChat.SendAsync(chatHistory, maxRound)) - { - chatHistory.Add(message); - } - - chatHistory.Count().Should().Be(2); - } - - [Fact] - public async Task ItTerminateConversationWhenNoSpeakerAvailable() - { - // fix #3306 - var alice = new 
DefaultReplyAgent("Alice", "I am alice"); - var bob = new DefaultReplyAgent("Bob", "I am bob"); - var cathy = new DefaultReplyAgent("Cathy", $"I am cathy, {GroupChatExtension.TERMINATE}"); - - var orchestrator = Mock.Of(); - Mock.Get(orchestrator).Setup(x => x.GetNextSpeakerAsync(It.IsAny(), It.IsAny())) - .ReturnsAsync((IAgent?)null); - - var groupChat = new GroupChat([alice, bob, cathy], orchestrator); - - var chatHistory = new List(); - - var maxRound = 10; - await foreach (var message in groupChat.SendAsync(chatHistory, maxRound)) - { - chatHistory.Add(message); - } - - chatHistory.Count().Should().Be(0); - } -} diff --git a/dotnet/test/AutoGen.Tests/ImageMessageTests.cs b/dotnet/test/AutoGen.Tests/ImageMessageTests.cs deleted file mode 100644 index 7c656e6de7..0000000000 --- a/dotnet/test/AutoGen.Tests/ImageMessageTests.cs +++ /dev/null @@ -1,44 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// ImageMessageTests.cs - -using System; -using System.IO; -using System.Threading.Tasks; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public class ImageMessageTests -{ - [Fact] - public async Task ItCreateFromLocalImage() - { - var image = Path.Combine("testData", "images", "background.png"); - var binary = File.ReadAllBytes(image); - var base64 = Convert.ToBase64String(binary); - var imageMessage = new ImageMessage(Role.User, BinaryData.FromBytes(binary, "image/png")); - - imageMessage.MimeType.Should().Be("image/png"); - imageMessage.BuildDataUri().Should().Be($"data:image/png;base64,{base64}"); - } - - [Fact] - public async Task ItCreateFromUrl() - { - var image = Path.Combine("testData", "images", "background.png"); - var fullPath = Path.GetFullPath(image); - var localUrl = new Uri(fullPath).AbsoluteUri; - var imageMessage = new ImageMessage(Role.User, localUrl); - - imageMessage.Url.Should().Be(localUrl); - imageMessage.MimeType.Should().Be("image/png"); - imageMessage.Data.Should().BeNull(); - } -} diff --git a/dotnet/test/AutoGen.Tests/MiddlewareAgentTest.cs b/dotnet/test/AutoGen.Tests/MiddlewareAgentTest.cs deleted file mode 100644 index 90ade8a435..0000000000 --- a/dotnet/test/AutoGen.Tests/MiddlewareAgentTest.cs +++ /dev/null @@ -1,111 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// MiddlewareAgentTest.cs - -using System.Linq; -using System.Threading.Tasks; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public class MiddlewareAgentTest -{ - [Fact] - public async Task MiddlewareAgentUseTestAsync() - { - IAgent echoAgent = new EchoAgent("echo"); - - var middlewareAgent = new MiddlewareAgent(echoAgent); - - // no middleware added - // the reply should be the same as the original agent - middlewareAgent.Name.Should().Be("echo"); - var reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("hello"); - - middlewareAgent.Use(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware 0] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("[middleware 0] hello"); - - middlewareAgent.Use(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware 1] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - // when multiple middleware are added, they will be executed in LIFO order - reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("[middleware 0] [middleware 1] hello"); - - // test short cut - // short cut middleware will not call next middleware - middlewareAgent.Use(async (messages, options, next, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware shortcut] {lastMessage.Content}"; - return lastMessage; - }); - reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("[middleware shortcut] hello"); - } - - [Fact] - public async Task RegisterMiddlewareTestAsync() - { - var echoAgent = new EchoAgent("echo"); - - // RegisterMiddleware will return a new agent and keep the original agent unchanged - var middlewareAgent = echoAgent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware 0] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - middlewareAgent.Should().BeOfType>(); - middlewareAgent.Middlewares.Count().Should().Be(1); - var reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("[middleware 0] hello"); - reply = await echoAgent.SendAsync("hello"); - reply.GetContent().Should().Be("hello"); - - // when multiple middleware are added, they will be executed in LIFO order - middlewareAgent = middlewareAgent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware 1] {lastMessage.Content}"; - return await agent.GenerateReplyAsync(messages, options, ct); - }); - - middlewareAgent.Middlewares.Count().Should().Be(2); - reply = await middlewareAgent.SendAsync("hello"); - reply.GetContent().Should().Be("[middleware 0] [middleware 1] hello"); - - // test short cut - // short cut middleware will not call next middleware - middlewareAgent = middlewareAgent.RegisterMiddleware(async (messages, options, agent, ct) => - { - var lastMessage = messages.Last() as TextMessage; - lastMessage!.Content = $"[middleware shortcut] {lastMessage.Content}"; - return lastMessage; - }); - - reply = await middlewareAgent.SendAsync("hello"); - 
reply.GetContent().Should().Be("[middleware shortcut] hello"); - - middlewareAgent.Middlewares.Count().Should().Be(3); - } -} diff --git a/dotnet/test/AutoGen.Tests/MiddlewareTest.cs b/dotnet/test/AutoGen.Tests/MiddlewareTest.cs deleted file mode 100644 index 34f1af872b..0000000000 --- a/dotnet/test/AutoGen.Tests/MiddlewareTest.cs +++ /dev/null @@ -1,132 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// MiddlewareTest.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public partial class MiddlewareTest -{ - [Function] - public async Task Echo(string message) - { - return $"[FUNC] {message}"; - } - - [Fact] - public async Task HumanInputMiddlewareTestAsync() - { - var agent = new EchoAgent("echo"); - var neverAskUserInputMW = new HumanInputMiddleware(mode: HumanInputMode.NEVER); - - var neverInputAgent = agent.RegisterMiddleware(neverAskUserInputMW); - var reply = await neverInputAgent.SendAsync("hello"); - reply.GetContent()!.Should().Be("hello"); - reply.From.Should().Be("echo"); - - var alwaysAskUserInputMW = new HumanInputMiddleware( - mode: HumanInputMode.ALWAYS, - getInput: () => "input"); - - var alwaysInputAgent = agent.RegisterMiddleware(alwaysAskUserInputMW); - reply = await alwaysInputAgent.SendAsync("hello"); - reply.GetContent()!.Should().Be("input"); - reply.From.Should().Be("echo"); - - // test auto mode - // if the reply from echo is not terminate message, return the original reply - var autoAskUserInputMW = new HumanInputMiddleware( - mode: HumanInputMode.AUTO, - isTermination: async (messages, ct) => messages.Last()?.GetContent() == "terminate", - getInput: () => "input", - exitKeyword: "exit"); - var autoInputAgent = agent.RegisterMiddleware(autoAskUserInputMW); - reply = await autoInputAgent.SendAsync("hello"); - reply.GetContent()!.Should().Be("hello"); - - // if the reply from echo is terminate message, asking user for input - reply = await autoInputAgent.SendAsync("terminate"); - reply.GetContent()!.Should().Be("input"); - - // if the reply from echo is terminate message, and user input is exit, return the TERMINATE message - autoAskUserInputMW = new HumanInputMiddleware( - mode: HumanInputMode.AUTO, - isTermination: async (messages, ct) => messages.Last().GetContent() == "terminate", - getInput: () => "exit", - exitKeyword: "exit"); - autoInputAgent = agent.RegisterMiddleware(autoAskUserInputMW); - - reply = await autoInputAgent.SendAsync("terminate"); - reply.IsGroupChatTerminateMessage().Should().BeTrue(); - } - - [Fact] - public async Task FunctionCallMiddlewareTestAsync() - { - var agent = new EchoAgent("echo"); - var args = new EchoSchema { message = "hello" }; - var argsJson = JsonSerializer.Serialize(args) ?? 
throw new InvalidOperationException("Failed to serialize args"); - var functionCall = new FunctionCall("echo", argsJson); - var functionCallAgent = agent.RegisterMiddleware(async (messages, options, agent, ct) => - { - if (options?.Functions is null) - { - return await agent.GenerateReplyAsync(messages, options, ct); - } - - return new ToolCallMessage(functionCall.Name, functionCall.Arguments, from: agent.Name); - }); - - // test 1 - // middleware should invoke function call if the message is a function call message - var mw = new FunctionCallMiddleware( - functionMap: new Dictionary>> { { "echo", EchoWrapper } }); - - var testAgent = agent.RegisterMiddleware(mw); - var functionCallMessage = new ToolCallMessage(functionCall.Name, functionCall.Arguments, from: "user"); - var reply = await testAgent.SendAsync(functionCallMessage); - reply.Should().BeOfType(); - reply.GetContent()!.Should().Be("[FUNC] hello"); - reply.From.Should().Be("echo"); - - // test 2 - // middleware should invoke function call if agent reply is a function call message - mw = new FunctionCallMiddleware( - functions: [this.EchoFunctionContract], - functionMap: new Dictionary>> { { "echo", EchoWrapper } }); - testAgent = functionCallAgent.RegisterMiddleware(mw); - reply = await testAgent.SendAsync("hello"); - reply.GetContent()!.Should().Be("[FUNC] hello"); - reply.From.Should().Be("echo"); - - // test 3 - // middleware should return original reply if the reply from agent is not a function call message - mw = new FunctionCallMiddleware( - functionMap: new Dictionary>> { { "echo", EchoWrapper } }); - testAgent = agent.RegisterMiddleware(mw); - reply = await testAgent.SendAsync("hello"); - reply.GetContent()!.Should().Be("hello"); - reply.From.Should().Be("echo"); - - // test 4 - // middleware should return an error message if the function name is not available when invoking the function from previous agent reply - mw = new FunctionCallMiddleware( - functionMap: new Dictionary>> { { "echo2", EchoWrapper } }); - testAgent = agent.RegisterMiddleware(mw); - reply = await testAgent.SendAsync(functionCallMessage); - reply.GetContent()!.Should().Be("Function echo is not available. Available functions are: echo2"); - } -} diff --git a/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs b/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs deleted file mode 100644 index d454dcf9fe..0000000000 --- a/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs +++ /dev/null @@ -1,387 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// RolePlayOrchestratorTests.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using AutoGen.Anthropic; -using AutoGen.Anthropic.Extensions; -using AutoGen.Anthropic.Utils; -using AutoGen.AzureAIInference; -using AutoGen.AzureAIInference.Extension; -using AutoGen.Gemini; -using AutoGen.Mistral; -using AutoGen.Mistral.Extension; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.Inference; -using Azure.AI.OpenAI; -using FluentAssertions; -using Moq; -using Xunit; - -namespace AutoGen.Tests; - -public class RolePlayOrchestratorTests -{ - [Fact] - public async Task ItReturnNextSpeakerTestAsync() - { - var admin = Mock.Of(); - Mock.Get(admin).Setup(x => x.Name).Returns("Admin"); - Mock.Get(admin).Setup(x => x.GenerateReplyAsync( - It.IsAny>(), - It.IsAny(), - It.IsAny())) - .Callback, GenerateReplyOptions, CancellationToken>((messages, option, _) => - { - // verify prompt - var rolePlayPrompt = messages.First().GetContent(); - rolePlayPrompt.Should().Contain("You are in a role play game. Carefully read the conversation history and carry on the conversation"); - rolePlayPrompt.Should().Contain("The available roles are:"); - rolePlayPrompt.Should().Contain("Alice,Bob"); - rolePlayPrompt.Should().Contain("From Alice:"); - option.StopSequence.Should().BeEquivalentTo([":"]); - option.Temperature.Should().Be(0); - option.MaxToken.Should().Be(128); - option.Functions.Should().BeNull(); - }) - .ReturnsAsync(new TextMessage(Role.Assistant, "From Alice")); - - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - - var orchestrator = new RolePlayOrchestrator(admin); - var context = new OrchestrationContext - { - Candidates = [alice, bob], - ChatHistory = [], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().Be(alice); - } - - [Fact] - public async Task ItReturnNullWhenNoCandidateIsAvailableAsync() - { - var admin = Mock.Of(); - var orchestrator = new RolePlayOrchestrator(admin); - var context = new OrchestrationContext - { - Candidates = [], - ChatHistory = [], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().BeNull(); - } - - [Fact] - public async Task ItReturnCandidateWhenOnlyOneCandidateIsAvailableAsync() - { - var admin = Mock.Of(); - var alice = new EchoAgent("Alice"); - var orchestrator = new RolePlayOrchestrator(admin); - var context = new OrchestrationContext - { - Candidates = [alice], - ChatHistory = [], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().Be(alice); - } - - [Fact] - public async Task ItThrowExceptionWhenAdminFailsToFollowPromptAsync() - { - var admin = Mock.Of(); - Mock.Get(admin).Setup(x => x.Name).Returns("Admin"); - Mock.Get(admin).Setup(x => x.GenerateReplyAsync( - It.IsAny>(), - It.IsAny(), - It.IsAny())) - .ReturnsAsync(new TextMessage(Role.Assistant, "I don't know")); // admin fails to follow the prompt and returns an invalid message - - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - - var orchestrator = new RolePlayOrchestrator(admin); - var context = new OrchestrationContext - { - Candidates = [alice, bob], - ChatHistory = [], - }; - - var action = async () => await orchestrator.GetNextSpeakerAsync(context); - - await action.Should().ThrowAsync() - .WithMessage("The response from admin is 't know, which is either not in the candidates list or not in the correct format."); - 
} - - [Fact] - public async Task ItSelectNextSpeakerFromWorkflowIfProvided() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - var charlie = new EchoAgent("Charlie"); - workflow.AddTransition(Transition.Create(alice, bob)); - workflow.AddTransition(Transition.Create(bob, charlie)); - workflow.AddTransition(Transition.Create(charlie, alice)); - - var admin = Mock.Of(); - var orchestrator = new RolePlayOrchestrator(admin, workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob, charlie], - ChatHistory = - [ - new TextMessage(Role.User, "Hello, Bob", from: "Alice"), - ], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().Be(bob); - } - - [Fact] - public async Task ItReturnNullIfNoAvailableAgentFromWorkflowAsync() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - workflow.AddTransition(Transition.Create(alice, bob)); - - var admin = Mock.Of(); - var orchestrator = new RolePlayOrchestrator(admin, workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob], - ChatHistory = - [ - new TextMessage(Role.User, "Hello, Alice", from: "Bob"), - ], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().BeNull(); - } - - [Fact] - public async Task ItUseCandidatesFromWorflowAsync() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - var charlie = new EchoAgent("Charlie"); - workflow.AddTransition(Transition.Create(alice, bob)); - workflow.AddTransition(Transition.Create(alice, charlie)); - - var admin = Mock.Of(); - Mock.Get(admin).Setup(x => x.GenerateReplyAsync( - It.IsAny>(), - It.IsAny(), - It.IsAny())) - .Callback, GenerateReplyOptions, CancellationToken>((messages, option, _) => - { - messages.First().IsSystemMessage().Should().BeTrue(); - - // verify prompt - var rolePlayPrompt = messages.First().GetContent(); - rolePlayPrompt.Should().Contain("Bob,Charlie"); - rolePlayPrompt.Should().Contain("From Bob:"); - option.StopSequence.Should().BeEquivalentTo([":"]); - option.Temperature.Should().Be(0); - option.MaxToken.Should().Be(128); - option.Functions.Should().BeEmpty(); - }) - .ReturnsAsync(new TextMessage(Role.Assistant, "From Bob")); - var orchestrator = new RolePlayOrchestrator(admin, workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob], - ChatHistory = - [ - new TextMessage(Role.User, "Hello, Bob", from: "Alice"), - ], - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker.Should().Be(bob); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPT_3_5_CoderReviewerRunnerTestAsync() - { - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName) - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(openAIChatAgent); - } - - [ApiKeyFact("OPENAI_API_KEY")] - public async Task GPT_4o_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); - var model = "gpt-4o"; - var openaiClient = new OpenAIClient(apiKey); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: model) - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(openAIChatAgent); - } - - [ApiKeyFact("OPENAI_API_KEY")] - public async Task GPT_4o_mini_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); - var model = "gpt-4o-mini"; - var openaiClient = new OpenAIClient(apiKey); - var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: model) - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(openAIChatAgent); - } - - - [ApiKeyFact("GOOGLE_GEMINI_API_KEY")] - public async Task GoogleGemini_1_5_flash_001_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("GOOGLE_GEMINI_API_KEY") ?? throw new InvalidOperationException("GOOGLE_GEMINI_API_KEY is not set"); - var geminiAgent = new GeminiChatAgent( - name: "gemini", - model: "gemini-1.5-flash-001", - apiKey: apiKey) - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(geminiAgent); - } - - - [ApiKeyFact("ANTHROPIC_API_KEY")] - public async Task Claude3_Haiku_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ?? throw new Exception("Please set ANTHROPIC_API_KEY environment variable."); - var client = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, apiKey); - - var agent = new AnthropicClientAgent( - client, - name: "AnthropicAgent", - AnthropicConstants.Claude3Haiku, - systemMessage: "You are a helpful AI assistant that convert user message to upper case") - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(agent); - } - - [ApiKeyFact("MISTRAL_API_KEY")] - public async Task Mistra_7b_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY") ?? throw new InvalidOperationException("MISTRAL_API_KEY is not set."); - var client = new MistralClient(apiKey: apiKey); - - var agent = new MistralClientAgent( - client: client, - name: "MistralClientAgent", - model: "open-mistral-7b") - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(agent); - } - - [ApiKeyFact("GH_API_KEY")] - public async Task LLaMA_3_1_CoderReviewerRunnerTestAsync() - { - var apiKey = Environment.GetEnvironmentVariable("GH_API_KEY") ?? 
throw new InvalidOperationException("GH_API_KEY is not set."); - var endPoint = "https://models.inference.ai.azure.com"; - - var chatCompletionClient = new ChatCompletionsClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - var agent = new ChatCompletionsClientAgent( - chatCompletionsClient: chatCompletionClient, - name: "assistant", - modelName: "Meta-Llama-3.1-70B-Instruct") - .RegisterMessageConnector(); - - await CoderReviewerRunnerTestAsync(agent); - } - - /// - /// This test is to mimic the conversation among coder, reviewer and runner. - /// The coder will write the code, the reviewer will review the code, and the runner will run the code. - /// - /// - /// - public async Task CoderReviewerRunnerTestAsync(IAgent admin) - { - var coder = new EchoAgent("Coder"); - var reviewer = new EchoAgent("Reviewer"); - var runner = new EchoAgent("Runner"); - var user = new EchoAgent("User"); - var initializeMessage = new List - { - new TextMessage(Role.User, "Hello, I am user, I will provide the coding task, please write the code first, then review and run it", from: "User"), - new TextMessage(Role.User, "Hello, I am coder, I will write the code", from: "Coder"), - new TextMessage(Role.User, "Hello, I am reviewer, I will review the code", from: "Reviewer"), - new TextMessage(Role.User, "Hello, I am runner, I will run the code", from: "Runner"), - new TextMessage(Role.User, "how to print 'hello world' using C#", from: user.Name), - }; - - var chatHistory = new List() - { - new TextMessage(Role.User, """ - ```csharp - Console.WriteLine("Hello World"); - ``` - """, from: coder.Name), - new TextMessage(Role.User, "The code looks good", from: reviewer.Name), - new TextMessage(Role.User, "The code runs successfully, the output is 'Hello World'", from: runner.Name), - }; - - var orchestrator = new RolePlayOrchestrator(admin); - foreach (var message in chatHistory) - { - var context = new OrchestrationContext - { - Candidates = [coder, reviewer, runner, user], - ChatHistory = initializeMessage, - }; - - var speaker = await orchestrator.GetNextSpeakerAsync(context); - speaker!.Name.Should().Be(message.From); - initializeMessage.Add(message); - } - - // the last next speaker should be the user - var lastSpeaker = await orchestrator.GetNextSpeakerAsync(new OrchestrationContext - { - Candidates = [coder, reviewer, runner, user], - ChatHistory = initializeMessage, - }); - - lastSpeaker!.Name.Should().Be(user.Name); - } -} diff --git a/dotnet/test/AutoGen.Tests/Orchestrator/RoundRobinOrchestratorTests.cs b/dotnet/test/AutoGen.Tests/Orchestrator/RoundRobinOrchestratorTests.cs deleted file mode 100644 index cfa042f1bc..0000000000 --- a/dotnet/test/AutoGen.Tests/Orchestrator/RoundRobinOrchestratorTests.cs +++ /dev/null @@ -1,109 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// RoundRobinOrchestratorTests.cs - -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public class RoundRobinOrchestratorTests -{ - [Fact] - public async Task ItReturnNextAgentAsync() - { - var orchestrator = new RoundRobinOrchestrator(); - var context = new OrchestrationContext - { - Candidates = new List - { - new EchoAgent("Alice"), - new EchoAgent("Bob"), - new EchoAgent("Charlie"), - }, - }; - - var messages = new List - { - new TextMessage(Role.User, "Hello, Alice", from: "Alice"), - new TextMessage(Role.User, "Hello, Bob", from: "Bob"), - new TextMessage(Role.User, "Hello, Charlie", from: "Charlie"), - }; - - var expected = new List { "Bob", "Charlie", "Alice" }; - - var zip = messages.Zip(expected); - - foreach (var (msg, expect) in zip) - { - context.ChatHistory = [msg]; - var nextSpeaker = await orchestrator.GetNextSpeakerAsync(context); - Assert.Equal(expect, nextSpeaker!.Name); - } - } - - [Fact] - public async Task ItReturnNullIfNoCandidates() - { - var orchestrator = new RoundRobinOrchestrator(); - var context = new OrchestrationContext - { - Candidates = new List(), - ChatHistory = new List - { - new TextMessage(Role.User, "Hello, Alice", from: "Alice"), - }, - }; - - var result = await orchestrator.GetNextSpeakerAsync(context); - Assert.Null(result); - } - - [Fact] - public async Task ItReturnNullIfLastMessageIsNotFromCandidates() - { - var orchestrator = new RoundRobinOrchestrator(); - var context = new OrchestrationContext - { - Candidates = new List - { - new EchoAgent("Alice"), - new EchoAgent("Bob"), - new EchoAgent("Charlie"), - }, - ChatHistory = new List - { - new TextMessage(Role.User, "Hello, David", from: "David"), - }, - }; - - var result = await orchestrator.GetNextSpeakerAsync(context); - result.Should().BeNull(); - } - - [Fact] - public async Task ItReturnTheFirstAgentInTheListIfNoChatHistory() - { - var orchestrator = new RoundRobinOrchestrator(); - var context = new OrchestrationContext - { - Candidates = new List - { - new EchoAgent("Alice"), - new EchoAgent("Bob"), - new EchoAgent("Charlie"), - }, - }; - - var result = await orchestrator.GetNextSpeakerAsync(context); - result!.Name.Should().Be("Alice"); - } -} diff --git a/dotnet/test/AutoGen.Tests/Orchestrator/WorkflowOrchestratorTests.cs b/dotnet/test/AutoGen.Tests/Orchestrator/WorkflowOrchestratorTests.cs deleted file mode 100644 index 5aab435f26..0000000000 --- a/dotnet/test/AutoGen.Tests/Orchestrator/WorkflowOrchestratorTests.cs +++ /dev/null @@ -1,118 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// WorkflowOrchestratorTests.cs - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public class WorkflowOrchestratorTests -{ - [Fact] - public async Task ItReturnNextAgentAsync() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - var charlie = new EchoAgent("Charlie"); - workflow.AddTransition(Transition.Create(alice, bob)); - workflow.AddTransition(Transition.Create(bob, charlie)); - workflow.AddTransition(Transition.Create(charlie, alice)); - var orchestrator = new WorkflowOrchestrator(workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob, charlie] - }; - - var messages = new List - { - new TextMessage(Role.User, "Hello, Alice", from: "Alice"), - new TextMessage(Role.User, "Hello, Bob", from: "Bob"), - new TextMessage(Role.User, "Hello, Charlie", from: "Charlie"), - }; - - var expected = new List { "Bob", "Charlie", "Alice" }; - - var zip = messages.Zip(expected); - - foreach (var (msg, expect) in zip) - { - context.ChatHistory = [msg]; - var result = await orchestrator.GetNextSpeakerAsync(context); - Assert.Equal(expect, result!.Name); - } - } - - [Fact] - public async Task ItReturnNullIfNoCandidates() - { - var workflow = new Graph(); - var orchestrator = new WorkflowOrchestrator(workflow); - var context = new OrchestrationContext - { - Candidates = new List(), - ChatHistory = new List - { - new TextMessage(Role.User, "Hello, Alice", from: "Alice"), - }, - }; - - var nextAgent = await orchestrator.GetNextSpeakerAsync(context); - nextAgent.Should().BeNull(); - } - - [Fact] - public async Task ItReturnNullIfNoAgentIsAvailableFromWorkflowAsync() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - workflow.AddTransition(Transition.Create(alice, bob)); - var orchestrator = new WorkflowOrchestrator(workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob], - ChatHistory = new List - { - new TextMessage(Role.User, "Hello, Bob", from: "Bob"), - }, - }; - - var nextSpeaker = await orchestrator.GetNextSpeakerAsync(context); - nextSpeaker.Should().BeNull(); - } - - [Fact] - public async Task ItThrowExceptionWhenMoreThanOneAvailableAgentsFromWorkflowAsync() - { - var workflow = new Graph(); - var alice = new EchoAgent("Alice"); - var bob = new EchoAgent("Bob"); - var charlie = new EchoAgent("Charlie"); - workflow.AddTransition(Transition.Create(alice, bob)); - workflow.AddTransition(Transition.Create(alice, charlie)); - var orchestrator = new WorkflowOrchestrator(workflow); - var context = new OrchestrationContext - { - Candidates = [alice, bob, charlie], - ChatHistory = new List - { - new TextMessage(Role.User, "Hello, Bob", from: "Alice"), - }, - }; - - var action = async () => await orchestrator.GetNextSpeakerAsync(context); - - await action.Should().ThrowExactlyAsync().WithMessage("There are more than one available agents from the workflow for the next speaker."); - } -} diff --git a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs b/dotnet/test/AutoGen.Tests/SingleAgentTest.cs deleted file mode 100644 index e7190d657d..0000000000 --- a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs +++ /dev/null @@ -1,378 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under 
the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// SingleAgentTest.cs - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Threading.Tasks; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using FluentAssertions; -using Xunit; -using Xunit.Abstractions; - -namespace AutoGen.Tests -{ - public partial class SingleAgentTest - { - private ITestOutputHelper _output; - public SingleAgentTest(ITestOutputHelper output) - { - _output = output; - } - - private ILLMConfig CreateAzureOpenAIGPT35TurboConfig() - { - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); - var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); - return new AzureOpenAIConfig(endpoint, deployName, key); - } - - private ILLMConfig CreateOpenAIGPT4VisionConfig() - { - var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new ArgumentException("OPENAI_API_KEY is not set"); - return new OpenAIConfig(key, "gpt-4-vision-preview"); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTAgentTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - - var agent = new GPTAgent("gpt", "You are a helpful AI assistant", config); - - await UpperCaseTestAsync(agent); - await UpperCaseStreamingTestAsync(agent); - } - - [ApiKeyFact("OPENAI_API_KEY", "AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")] - public async Task GPTAgentVisionTestAsync() - { - var visionConfig = this.CreateOpenAIGPT4VisionConfig(); - var visionAgent = new GPTAgent( - name: "gpt", - systemMessage: "You are a helpful AI assistant", - config: visionConfig, - temperature: 0); - - var gpt3Config = this.CreateAzureOpenAIGPT35TurboConfig(); - var gpt3Agent = new GPTAgent( - name: "gpt3", - systemMessage: "You are a helpful AI assistant, return highest label from conversation", - config: gpt3Config, - temperature: 0, - functions: new[] { this.GetHighestLabelFunctionContract.ToOpenAIFunctionDefinition() }, - functionMap: new Dictionary>> - { - { nameof(GetHighestLabel), this.GetHighestLabelWrapper }, - }); - - var imageUri = new Uri(@"https://ag2ai.github.io/autogen/assets/images/level2algebra-659ba95286432d9945fc89e84d606797.png"); - var oaiMessage = new ChatRequestUserMessage( - new ChatMessageTextContentItem("which label has the highest inference cost"), - new ChatMessageImageContentItem(imageUri)); - var multiModalMessage = new MultiModalMessage(Role.User, - [ - new TextMessage(Role.User, "which label has the highest inference cost", from: "user"), - new ImageMessage(Role.User, imageUri, from: "user"), - ], - from: "user"); - - var imageMessage = new ImageMessage(Role.User, imageUri, from: "user"); - - string imagePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ApprovalTests", "square.png"); - ImageMessage imageMessageData; - using (var fs = new FileStream(imagePath, FileMode.Open, FileAccess.Read)) - { - var ms = new MemoryStream(); - await 
fs.CopyToAsync(ms); - ms.Seek(0, SeekOrigin.Begin); - var imageData = await BinaryData.FromStreamAsync(ms, "image/png"); - imageMessageData = new ImageMessage(Role.Assistant, imageData, from: "user"); - } - - IMessage[] messages = [ - MessageEnvelope.Create(oaiMessage), - multiModalMessage, - imageMessage, - imageMessageData - ]; - - foreach (var message in messages) - { - var response = await visionAgent.SendAsync(message); - response.From.Should().Be(visionAgent.Name); - - var labelResponse = await gpt3Agent.SendAsync(response); - labelResponse.From.Should().Be(gpt3Agent.Name); - labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel)); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTFunctionCallAgentTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }); - - await EchoFunctionCallTestAsync(agentWithFunction); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task AssistantAgentFunctionCallTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - - var llmConfig = new ConversableAgentConfig - { - Temperature = 0, - FunctionContracts = new[] - { - this.EchoAsyncFunctionContract, - }, - ConfigList = new[] - { - config, - }, - }; - - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: llmConfig); - - await EchoFunctionCallTestAsync(assistantAgent); - } - - [Fact] - public async Task ItCreateAssistantAgentFromLMStudioConfigAsync() - { - var host = "http://localhost"; - var port = 8080; - var lmStudioConfig = new LMStudioConfig(host, port); - - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: new ConversableAgentConfig() - { - ConfigList = [lmStudioConfig], - }); - - assistantAgent.Name.Should().Be("assistant"); - assistantAgent.InnerAgent.Should().BeOfType(); - } - - [ApiKeyFact("LMStudio_ENDPOINT")] - public async Task ItTestAssistantAgentFromLMStudioConfigAsync() - { - var Uri = Environment.GetEnvironmentVariable("LMStudio_ENDPOINT") ?? 
throw new ArgumentException("LMStudio_ENDPOINT is not set"); - var lmStudioConfig = new LMStudioConfig(new Uri(Uri)); - - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: new ConversableAgentConfig() - { - ConfigList = [lmStudioConfig], - }); - - assistantAgent.Name.Should().Be("assistant"); - assistantAgent.InnerAgent.Should().BeOfType(); - await this.UpperCaseTestAsync(assistantAgent); - } - - - [Fact] - public async Task AssistantAgentDefaultReplyTestAsync() - { - var assistantAgent = new AssistantAgent( - llmConfig: null, - name: "assistant", - defaultReply: "hello world"); - - var reply = await assistantAgent.SendAsync("hi"); - - reply.GetContent().Should().Be("hello world"); - reply.GetRole().Should().Be(Role.Assistant); - reply.From.Should().Be(assistantAgent.Name); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task AssistantAgentFunctionCallSelfExecutionTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - var llmConfig = new ConversableAgentConfig - { - FunctionContracts = new[] - { - this.EchoAsyncFunctionContract, - }, - ConfigList = new[] - { - config, - }, - }; - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: llmConfig, - functionMap: new Dictionary>> - { - { nameof(EchoAsync), this.EchoAsyncWrapper }, - }); - - await EchoFunctionCallExecutionTestAsync(assistantAgent); - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTAgentFunctionCallSelfExecutionTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - var agent = new GPTAgent( - name: "gpt", - systemMessage: "You are a helpful AI assistant", - config: config, - temperature: 0, - functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }, - functionMap: new Dictionary>> - { - { nameof(EchoAsync), this.EchoAsyncWrapper }, - }); - - await EchoFunctionCallExecutionStreamingTestAsync(agent); - await EchoFunctionCallExecutionTestAsync(agent); - } - - /// - /// echo when asked. 
- /// - /// message to echo - [FunctionAttribute] - public async Task EchoAsync(string message) - { - return $"[ECHO] {message}"; - } - - /// - /// return the label name with hightest inference cost - /// - /// - /// - [FunctionAttribute] - public async Task GetHighestLabel(string labelName, string color) - { - return $"[HIGHEST_LABEL] {labelName} {color}"; - } - - public async Task EchoFunctionCallTestAsync(IAgent agent) - { - //var message = new TextMessage(Role.System, "You are a helpful AI assistant that call echo function"); - var helloWorld = new TextMessage(Role.User, "echo Hello world"); - - var reply = await agent.SendAsync(chatHistory: new[] { helloWorld }); - - reply.From.Should().Be(agent.Name); - reply.GetToolCalls()!.First().FunctionName.Should().Be(nameof(EchoAsync)); - } - - public async Task EchoFunctionCallExecutionTestAsync(IAgent agent) - { - //var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says"); - var helloWorld = new TextMessage(Role.User, "echo Hello world"); - - var reply = await agent.SendAsync(chatHistory: new[] { helloWorld }); - - reply.GetContent().Should().Be("[ECHO] Hello world"); - reply.From.Should().Be(agent.Name); - reply.Should().BeOfType(); - } - - public async Task EchoFunctionCallExecutionStreamingTestAsync(IStreamingAgent agent) - { - //var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says"); - var helloWorld = new TextMessage(Role.User, "echo Hello world"); - var option = new GenerateReplyOptions - { - Temperature = 0, - }; - var replyStream = agent.GenerateStreamingReplyAsync(messages: new[] { helloWorld }, option); - var answer = "[ECHO] Hello world"; - IMessage? finalReply = default; - await foreach (var reply in replyStream) - { - reply.From.Should().Be(agent.Name); - finalReply = reply; - } - - if (finalReply is ToolCallAggregateMessage aggregateMessage) - { - var toolCallResultMessage = aggregateMessage.Message2; - toolCallResultMessage.ToolCalls.First().Result.Should().Be(answer); - toolCallResultMessage.From.Should().Be(agent.Name); - toolCallResultMessage.ToolCalls.First().FunctionName.Should().Be(nameof(EchoAsync)); - } - else - { - throw new Exception("unexpected message type"); - } - } - - public async Task UpperCaseTestAsync(IAgent agent) - { - var message = new TextMessage(Role.User, "Please convert abcde to upper case."); - - var reply = await agent.SendAsync(chatHistory: new[] { message }); - - reply.GetContent().Should().Contain("ABCDE"); - reply.From.Should().Be(agent.Name); - } - - public async Task UpperCaseStreamingTestAsync(IStreamingAgent agent) - { - var message = new TextMessage(Role.User, "Please convert 'hello world' to upper case"); - var option = new GenerateReplyOptions - { - Temperature = 0, - }; - var replyStream = agent.GenerateStreamingReplyAsync(messages: new[] { message }, option); - var answer = "HELLO WORLD"; - TextMessage? 
finalReply = default; - await foreach (var reply in replyStream) - { - if (reply is TextMessageUpdate update) - { - update.From.Should().Be(agent.Name); - - if (finalReply is null) - { - finalReply = new TextMessage(update); - } - else - { - finalReply.Update(update); - } - - continue; - } - else if (reply is TextMessage textMessage) - { - finalReply = textMessage; - continue; - } - - throw new Exception("unexpected message type"); - } - - finalReply!.Content.Should().Contain(answer); - finalReply!.Role.Should().Be(Role.Assistant); - finalReply!.From.Should().Be(agent.Name); - } - } -} diff --git a/dotnet/test/AutoGen.Tests/TwoAgentTest.cs b/dotnet/test/AutoGen.Tests/TwoAgentTest.cs deleted file mode 100644 index c6cec9cd16..0000000000 --- a/dotnet/test/AutoGen.Tests/TwoAgentTest.cs +++ /dev/null @@ -1,112 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// TwoAgentTest.cs -#pragma warning disable xUnit1013 -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using AutoGen.OpenAI.V1; -using FluentAssertions; -using Xunit.Abstractions; - -namespace AutoGen.Tests; - -public partial class TwoAgentTest -{ - private ITestOutputHelper _output; - public TwoAgentTest(ITestOutputHelper output) - { - _output = output; - } - - [Function] - public async Task GetWeather(string city) - { - return $"[GetWeatherFunction] The weather in {city} is sunny"; - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task TwoAgentWeatherChatTestAsync() - { - var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); - var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); - var config = new AzureOpenAIConfig(endpoint, deploymentName, key); - - var assistant = new AssistantAgent( - "assistant", - llmConfig: new ConversableAgentConfig - { - ConfigList = new[] { config }, - FunctionContracts = new[] - { - this.GetWeatherFunctionContract, - }, - }) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var reply = await agent.GenerateReplyAsync(msgs, option, ct); - var format = reply.FormatMessage(); - _output.WriteLine(format); - - return reply; - }); - - var user = new UserProxyAgent( - name: "user", - functionMap: new Dictionary>> - { - { this.GetWeatherFunctionContract.Name, this.GetWeatherWrapper }, - }) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var lastMessage = msgs.Last(); - if (lastMessage.GetToolCalls()?.FirstOrDefault()?.FunctionName != null) - { - return await agent.GenerateReplyAsync(msgs, option, ct); - } - else - { - // terminate message - return new TextMessage(Role.Assistant, GroupChatExtension.TERMINATE); - } - }) - .RegisterMiddleware(async (msgs, option, agent, ct) => - { - var reply = await agent.GenerateReplyAsync(msgs, option, ct); - var format = reply.FormatMessage(); - _output.WriteLine(format); - - return reply; - }); - - var chatHistory = (await user.InitiateChatAsync(assistant, "what's weather in New York", 10)).ToArray(); - - // the last message should be terminated message - chatHistory.Last().IsGroupChatTerminateMessage().Should().BeTrue(); - - // the third last message should be the weather message from function - chatHistory[^3].GetContent().Should().Be("[GetWeatherFunction] The weather in New York is sunny"); - - // the # of messages should be 5 - chatHistory.Length.Should().Be(5); - } - - public async Task TwoAgentGetWeatherFunctionCallTestAsync(IAgent user, IAgent assistant) - { - var question = new TextMessage(Role.Assistant, "what's the weather in Seattle", from: user.Name); - var assistantReply = await assistant.SendAsync(question); - assistantReply.Should().BeOfType(); - var toolCallResult = await user.SendAsync(chatHistory: [question, assistantReply]); - toolCallResult.Should().BeOfType(); - var finalReply = await assistant.SendAsync(chatHistory: [question, assistantReply, toolCallResult]); - finalReply.Should().BeOfType(); - finalReply.GetContent()!.ToLower().Should().Contain("sunny"); - } -} diff --git a/dotnet/test/AutoGen.Tests/WorkflowTest.cs b/dotnet/test/AutoGen.Tests/WorkflowTest.cs deleted file mode 100644 index 8834fd7665..0000000000 --- a/dotnet/test/AutoGen.Tests/WorkflowTest.cs +++ /dev/null @@ -1,76 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// WorkflowTest.cs - -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using FluentAssertions; -using Xunit; - -namespace AutoGen.Tests; - -public class WorkflowTest -{ - [Fact] - public async Task TransitionTestAsync() - { - var alice = new EchoAgent("alice"); - var bob = new EchoAgent("bob"); - - var aliceToBob = Transition.Create(alice, bob, async (from, to, messages, _) => - { - if (messages.Any(m => m.GetContent() == "Hello")) - { - return true; - } - - return false; - }); - - var canTransit = await aliceToBob.CanTransitionAsync([]); - canTransit.Should().BeFalse(); - - canTransit = await aliceToBob.CanTransitionAsync([new TextMessage(Role.Assistant, "Hello")]); - canTransit.Should().BeTrue(); - - // if no function is provided, it should always return true - var aliceToBobNoFunction = Transition.Create(alice, bob); - canTransit = await aliceToBobNoFunction.CanTransitionAsync(new[] { new TextMessage(Role.Assistant, "Hello") }); - canTransit.Should().BeTrue(); - } - - [Fact] - public async Task WorkflowBasicTestAsync() - { - var alice = new EchoAgent("alice"); - var bob = new EchoAgent("bob"); - var charlie = new EchoAgent("charlie"); - - // alice can speak to bob - // bob can speak to charlie - // charlie can speak to alice - - var aliceToBob = Transition.Create(alice, bob); - var bobToCharlie = Transition.Create(bob, charlie); - var charlieToAlice = Transition.Create(charlie, alice); - var workflow = new Graph([aliceToBob, bobToCharlie, charlieToAlice]); - IAgent currentAgent = alice; - var agentNames = new List(); - do - { - agentNames.Add(currentAgent.Name!); - var nextAgents = await workflow.TransitToNextAvailableAgentsAsync(currentAgent, []); - nextAgents.Count().Should().Be(1); - currentAgent = nextAgents.First(); - } - while (currentAgent != alice); - - agentNames.Should().BeEquivalentTo(["alice", "bob", "charlie"]); - } -} diff --git a/dotnet/test/AutoGen.WebAPI.Tests/AutoGen.WebAPI.Tests.csproj b/dotnet/test/AutoGen.WebAPI.Tests/AutoGen.WebAPI.Tests.csproj deleted file mode 100644 index 7ec6c408cf..0000000000 --- a/dotnet/test/AutoGen.WebAPI.Tests/AutoGen.WebAPI.Tests.csproj +++ /dev/null @@ -1,28 +0,0 @@ -ο»Ώ - - - $(TestTargetFrameworks) - enable - enable - false - true - True - - - - - - - - - - - - - - - - - - - diff --git a/dotnet/test/AutoGen.WebAPI.Tests/EchoAgent.cs b/dotnet/test/AutoGen.WebAPI.Tests/EchoAgent.cs deleted file mode 100644 index fdcc65e89a..0000000000 --- a/dotnet/test/AutoGen.WebAPI.Tests/EchoAgent.cs +++ /dev/null @@ -1,51 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// EchoAgent.cs - -using System.Runtime.CompilerServices; -using AutoGen.Core; - -namespace AutoGen.WebAPI.Tests; - -public class EchoAgent : IStreamingAgent -{ - public EchoAgent(string name) - { - Name = name; - } - public string Name { get; } - - public async Task GenerateReplyAsync( - IEnumerable messages, - GenerateReplyOptions? options = null, - CancellationToken cancellationToken = default) - { - return messages.Last(); - } - - public async IAsyncEnumerable GenerateStreamingReplyAsync( - IEnumerable messages, - GenerateReplyOptions? 
options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var lastMessage = messages.LastOrDefault(); - if (lastMessage == null) - { - yield break; - } - - // return each character of the last message as a separate message - if (lastMessage.GetContent() is string content) - { - foreach (var c in content) - { - yield return new TextMessageUpdate(Role.Assistant, c.ToString(), this.Name); - } - } - } -} diff --git a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs b/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs deleted file mode 100644 index a6654cfd1a..0000000000 --- a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs +++ /dev/null @@ -1,94 +0,0 @@ -ο»Ώ// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -// SPDX-License-Identifier: Apache-2.0 -// Contributions to this project, i.e., https://github.com/ag2ai/ag2, -// are licensed under the Apache License, Version 2.0 (Apache-2.0). -// Portions derived from https://github.com/microsoft/autogen under the MIT License. -// SPDX-License-Identifier: MIT -// Copyright (c) Microsoft Corporation. All rights reserved. -// OpenAIChatCompletionMiddlewareTests.cs - -using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; -using FluentAssertions; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.TestHost; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; - -namespace AutoGen.WebAPI.Tests; - -public class OpenAIChatCompletionMiddlewareTests -{ - [Fact] - public async Task ItReturnTextMessageWhenSendTextMessage() - { - var agent = new EchoAgent("test"); - var hostBuilder = CreateHostBuilder(agent); - using var host = await hostBuilder.StartAsync(); - var client = host.GetTestClient(); - var openaiClient = CreateOpenAIClient(client); - var openAIAgent = new OpenAIChatAgent(openaiClient, "test", "test") - .RegisterMessageConnector(); - - var response = await openAIAgent.SendAsync("Hey"); - - response.GetContent().Should().Be("Hey"); - response.Should().BeOfType(); - response.From.Should().Be("test"); - } - - [Fact] - public async Task ItReturnTextMessageWhenSendTextMessageUseStreaming() - { - var agent = new EchoAgent("test"); - var hostBuilder = CreateHostBuilder(agent); - using var host = await hostBuilder.StartAsync(); - var client = host.GetTestClient(); - var openaiClient = CreateOpenAIClient(client); - var openAIAgent = new OpenAIChatAgent(openaiClient, "test", "test") - .RegisterMessageConnector(); - - var message = new TextMessage(Role.User, "ABCDEFGHIJKLMN"); - var chunks = new List(); - await foreach (var chunk in openAIAgent.GenerateStreamingReplyAsync([message])) - { - chunk.Should().BeOfType(); - chunks.Add(chunk); - } - - var mergedChunks = string.Join("", chunks.Select(c => c.GetContent())); - mergedChunks.Should().Be("ABCDEFGHIJKLMN"); - chunks.Count.Should().Be(14); - } - - private IHostBuilder CreateHostBuilder(IAgent agent) - { - return new HostBuilder() - .ConfigureWebHost(webHost => - { - webHost.UseTestServer(); - 
webHost.Configure(app => - { - app.UseAgentAsOpenAIChatCompletionEndpoint(agent); - }); - }); - } - - private OpenAIClient CreateOpenAIClient(HttpClient client) - { - var clientOption = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_02_15_Preview) - { - Transport = new HttpClientTransport(client), - }; - return new OpenAIClient("api-key", clientOption); - } -} diff --git a/dotnet/website/.gitignore b/dotnet/website/.gitignore deleted file mode 100644 index 8d5bc9f449..0000000000 --- a/dotnet/website/.gitignore +++ /dev/null @@ -1,12 +0,0 @@ -############### -# folder # -############### -/**/DROP/ -/**/TEMP/ -/**/packages/ -/**/bin/ -/**/obj/ - -# build artifacts for web -_site/ -api/ diff --git a/dotnet/website/README.md b/dotnet/website/README.md deleted file mode 100644 index fd587ad280..0000000000 --- a/dotnet/website/README.md +++ /dev/null @@ -1,13 +0,0 @@ -## How to build and run the website - -### Prerequisites -- dotnet 7.0 or later - -### Build -Firstly, go to autogen/dotnet folder and run the following command to build the website: -```bash -dotnet tool restore -dotnet tool run docfx website/docfx.json --serve -``` - -After the command is executed, you can open your browser and navigate to `http://localhost:8080` to view the website. \ No newline at end of file diff --git a/dotnet/website/articles/Agent-overview.md b/dotnet/website/articles/Agent-overview.md deleted file mode 100644 index 586d231a6e..0000000000 --- a/dotnet/website/articles/Agent-overview.md +++ /dev/null @@ -1,43 +0,0 @@ -`Agent` is one of the most fundamental concepts in AutoGen.Net. In AutoGen.Net, you construct a single agent to process a specific task, and you extend an agent using [Middlewares](./Middleware-overview.md), and you construct a multi-agent workflow using [GroupChat](./Group-chat-overview.md). - -> [!NOTE] -> Every agent in AutoGen.Net implements @AutoGen.Core.IAgent, for agent that supports streaming reply, it also implements @AutoGen.Core.IStreamingAgent. - -## Create an agent -- Create an @AutoGen.AssistantAgent: [Create an assistant agent](./Create-an-agent.md) -- Create an @AutoGen.OpenAI.OpenAIChatAgent: [Create an OpenAI chat agent](./OpenAIChatAgent-simple-chat.md) -- Create a @AutoGen.SemanticKernel.SemanticKernelAgent: [Create a semantic kernel agent](./AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md) -- Create a @AutoGen.LMStudio.LMStudioAgent: [Connect to LM Studio](./Consume-LLM-server-from-LM-Studio.md) - -## Chat with an agent -To chat with an agent, typically you can invoke @AutoGen.Core.IAgent.GenerateReplyAsync*. On top of that, you can also use one of the extension methods like @AutoGen.Core.AgentExtension.SendAsync* as shortcuts. - -> [!NOTE] -> AutoGen provides a list of built-in message types like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, @AutoGen.Core.MultiModalMessage, @AutoGen.Core.ToolCallMessage, @AutoGen.Core.ToolCallResultMessage, etc. You can use these message types to chat with an agent. For further details, see [built-in messages](./Built-in-messages.md). 
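For orientation, here is a minimal sketch of both call styles. It assumes `agent` is any already-constructed @AutoGen.Core.IAgent (for example an `AssistantAgent`); the linked snippets below show the same flow from the sample project.

```csharp
using AutoGen.Core;

// Assumes `agent` is any IAgent you have already created.
var question = new TextMessage(Role.User, "Please convert 'hello world' to upper case.");

// Core API: every IAgent implements GenerateReplyAsync.
var reply = await agent.GenerateReplyAsync(new[] { question });

// Shortcut: SendAsync is an extension method that wraps GenerateReplyAsync.
var sameReply = await agent.SendAsync(chatHistory: new[] { question });

Console.WriteLine(reply.GetContent());
```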
- -- Send a @AutoGen.Core.TextMessage to an agent via @AutoGen.Core.IAgent.GenerateReplyAsync*: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs?name=ChatWithAnAgent_GenerateReplyAsync)] - -- Send a message to an agent via @AutoGen.Core.AgentExtension.SendAsync*: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs?name=ChatWithAnAgent_SendAsync)] - -## Streaming chat -If an agent implements @AutoGen.Core.IStreamingAgent, you can use @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync* to chat with the agent in a streaming way. You would need to process the streaming updates on your side though. - -- Send a @AutoGen.Core.TextMessage to an agent via @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync*, and print the streaming updates to console: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/AgentCodeSnippet.cs?name=ChatWithAnAgent_GenerateStreamingReplyAsync)] - -## Register middleware to an agent -@AutoGen.Core.IMiddleware and @AutoGen.Core.IStreamingMiddleware are used to extend the behavior of @AutoGen.Core.IAgent.GenerateReplyAsync* and @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync*. You can register middleware to an agent to customize the behavior of the agent on things like function call support, converting message of different types, print message, gather user input, etc. - -- Middleware overview: [Middleware overview](./Middleware-overview.md) -- Write message to console: [Print message middleware](./Print-message-middleware.md) -- Convert message type: [SemanticKernelChatMessageContentConnector](./AutoGen.SemanticKernel/SemanticKernelAgent-support-more-messages.md) and [OpenAIChatRequestMessageConnector](./OpenAIChatAgent-support-more-messages.md) -- Create your own middleware: [Create your own middleware](./Create-your-own-middleware.md) - -## Group chat -You can construct a multi-agent workflow using @AutoGen.Core.IGroupChat. In AutoGen.Net, there are two type of group chat: -@AutoGen.Core.SequentialGroupChat: Orchestrates the agents in the group chat in a fix, sequential order. -@AutoGen.Core.GroupChat: Provide more dynamic yet controllable way to orchestrate the agents in the group chat. - -For further details, see [Group chat overview](./Group-chat-overview.md). \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen-Mistral-Overview.md b/dotnet/website/articles/AutoGen-Mistral-Overview.md deleted file mode 100644 index df5e154d05..0000000000 --- a/dotnet/website/articles/AutoGen-Mistral-Overview.md +++ /dev/null @@ -1,26 +0,0 @@ -## AutoGen.Mistral overview - -AutoGen.Mistral provides the following agent(s) to connect to [Mistral.AI](https://mistral.ai/) platform. -- @AutoGen.Mistral.MistralClientAgent: A slim wrapper agent over @AutoGen.Mistral.MistralClient. - -### Get started with AutoGen.Mistral - -To get started with AutoGen.Mistral, follow the [installation guide](Installation.md) to make sure you add the AutoGen feed correctly. Then add the `AutoGen.Mistral` package to your project file. - -```bash -dotnet add package AutoGen.Mistral -``` - ->[!NOTE] -> You need to provide an api-key to use Mistral models which will bring additional cost while using. you can get the api key from [Mistral.AI](https://mistral.ai/). - -### Example - -Import the required namespace -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=using_statement)] - -Create a @AutoGen.Mistral.MistralClientAgent and start chatting! 
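For readers without the sample project at hand, the shape of that code is roughly the following. The constructor argument names, their order, and the model id are illustrative assumptions; the snippet below is the authoritative version.

```csharp
using AutoGen.Core;
using AutoGen.Mistral;

// Illustrative sketch - argument names, order and model id are assumptions,
// see the linked sample snippet for the exact API.
var apiKey = Environment.GetEnvironmentVariable("MISTRAL_API_KEY")
    ?? throw new ArgumentException("MISTRAL_API_KEY is not set");
var client = new MistralClient(apiKey);

var agent = new MistralClientAgent(client, name: "assistant", model: "open-mistral-7b");

var reply = await agent.SendAsync("Hello, what can you do?");
Console.WriteLine(reply.GetContent());
```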
-[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=create_mistral_agent)] - -Use @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync* to stream the chat completion. -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=streaming_chat)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen-OpenAI-Overview.md b/dotnet/website/articles/AutoGen-OpenAI-Overview.md deleted file mode 100644 index f46cbcc455..0000000000 --- a/dotnet/website/articles/AutoGen-OpenAI-Overview.md +++ /dev/null @@ -1,17 +0,0 @@ -## AutoGen.OpenAI Overview - -AutoGen.OpenAI provides the following agents over openai models: -- @AutoGen.OpenAI.OpenAIChatAgent: A slim wrapper agent over `OpenAIClient`. This agent only support `IMessage` message type. To support more message types like @AutoGen.Core.TextMessage, register the agent with @AutoGen.OpenAI.OpenAIChatRequestMessageConnector. -- @AutoGen.OpenAI.GPTAgent: An agent that build on top of @AutoGen.OpenAI.OpenAIChatAgent with more message types support like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, @AutoGen.Core.MultiModalMessage and function call support. Essentially, it is equivalent to @AutoGen.OpenAI.OpenAIChatAgent with @AutoGen.Core.FunctionCallMiddleware and @AutoGen.OpenAI.OpenAIChatRequestMessageConnector registered. - -### Get start with AutoGen.OpenAI - -To get start with AutoGen.OpenAI, firstly, follow the [installation guide](Installation.md) to make sure you add the AutoGen feed correctly. Then add `AutoGen.OpenAI` package to your project file. - -```xml - - - -``` - - diff --git a/dotnet/website/articles/AutoGen.Gemini/Chat-with-google-gemini.md b/dotnet/website/articles/AutoGen.Gemini/Chat-with-google-gemini.md deleted file mode 100644 index 45c089bbd2..0000000000 --- a/dotnet/website/articles/AutoGen.Gemini/Chat-with-google-gemini.md +++ /dev/null @@ -1,31 +0,0 @@ -This example shows how to use @AutoGen.Gemini.GeminiChatAgent to connect to Google AI Gemini and chat with Gemini model. - -To run this example, you need to have a Google AI Gemini API key. For how to get a Google Gemini API key, please refer to [Google Gemini](https://gemini.google.com/). - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs) - -> [!NOTE] -> What's the difference between Google AI Gemini and Vertex AI Gemini? -> -> Gemini is a series of large language models developed by Google. You can use it either from Google AI API or Vertex AI API. If you are relatively new to Gemini and wants to explore the feature and build some prototype for your chatbot app, Google AI APIs (with Google AI Studio) is a fast way to get started. While your app and idea matures and you'd like to leverage more MLOps tools that streamline the usage, deployment, and monitoring of models, you can move to Google Cloud Vertex AI which provides Gemini APIs along with many other features. Basically, to help you productionize your app. 
([reference](https://stackoverflow.com/questions/78007243/utilizing-gemini-through-vertex-ai-or-through-google-generative-ai)) - -### Step 1: Install AutoGen.Gemini - -First, install the AutoGen.Gemini package using the following command: - -```bash -dotnet add package AutoGen.Gemini -``` - -### Step 2: Add using statement - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs?name=Using)] - -### Step 3: Create a Gemini agent - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs?name=Create_Gemini_Agent)] - -### Step 4: Chat with Gemini - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Google_Gemini.cs?name=Chat_With_Google_Gemini)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.Gemini/Chat-with-vertex-gemini.md b/dotnet/website/articles/AutoGen.Gemini/Chat-with-vertex-gemini.md deleted file mode 100644 index e944e4901d..0000000000 --- a/dotnet/website/articles/AutoGen.Gemini/Chat-with-vertex-gemini.md +++ /dev/null @@ -1,32 +0,0 @@ -This example shows how to use @AutoGen.Gemini.GeminiChatAgent to connect to Vertex AI Gemini API and chat with Gemini model. - -To run this example, you need to have a project on Google Cloud with access to Vertex AI API. For more information please refer to [Google Vertex AI](https://cloud.google.com/vertex-ai/docs). - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs) - -> [!NOTE] -> What's the difference between Google AI Gemini and Vertex AI Gemini? -> -> Gemini is a series of large language models developed by Google. You can use it either from Google AI API or Vertex AI API. If you are relatively new to Gemini and wants to explore the feature and build some prototype for your chatbot app, Google AI APIs (with Google AI Studio) is a fast way to get started. While your app and idea matures and you'd like to leverage more MLOps tools that streamline the usage, deployment, and monitoring of models, you can move to Google Cloud Vertex AI which provides Gemini APIs along with many other features. Basically, to help you productionize your app. ([reference](https://stackoverflow.com/questions/78007243/utilizing-gemini-through-vertex-ai-or-through-google-generative-ai)) - -### Step 1: Install AutoGen.Gemini - -First, install the AutoGen.Gemini package using the following command: - -```bash -dotnet add package AutoGen.Gemini -``` - -### Step 2: Add using statement - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs?name=Using)] - -### Step 3: Create a Gemini agent - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs?name=Create_Gemini_Agent)] - - -### Step 4: Chat with Gemini - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Chat_With_Vertex_Gemini.cs?name=Chat_With_Vertex_Gemini)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.Gemini/Function-call-with-gemini.md b/dotnet/website/articles/AutoGen.Gemini/Function-call-with-gemini.md deleted file mode 100644 index 38313521a2..0000000000 --- a/dotnet/website/articles/AutoGen.Gemini/Function-call-with-gemini.md +++ /dev/null @@ -1,38 +0,0 @@ -This example shows how to use @AutoGen.Gemini.GeminiChatAgent to make function call. 
This example is modified from [gemini-api function call example](https://ai.google.dev/gemini-api/docs/function-calling) - -To run this example, you need to have a project on Google Cloud with access to Vertex AI API. For more information please refer to [Google Vertex AI](https://cloud.google.com/vertex-ai/docs). - - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Gemini.Sample/Function_Call_With_Gemini.cs) - -### Step 1: Install AutoGen.Gemini and AutoGen.SourceGenerator - -First, install the AutoGen.Gemini package using the following command: - -```bash -dotnet add package AutoGen.Gemini -dotnet add package AutoGen.SourceGenerator -``` - -The AutoGen.SourceGenerator package is required to generate the @AutoGen.Core.FunctionContract. For more information, please refer to [Create-type-safe-function-call](../Create-type-safe-function-call.md) - -### Step 2: Add using statement -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Function_call_with_gemini.cs?name=Using)] - -### Step 3: Create `MovieFunction` - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Function_call_with_gemini.cs?name=MovieFunction)] - -### Step 4: Create a Gemini agent - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Function_call_with_gemini.cs?name=Create_Gemini_Agent)] - -### Step 5: Single turn function call - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Function_call_with_gemini.cs?name=Single_turn)] - -### Step 6: Multi-turn function call - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Function_call_with_gemini.cs?name=Multi_turn)] - diff --git a/dotnet/website/articles/AutoGen.Gemini/Image-chat-with-gemini.md b/dotnet/website/articles/AutoGen.Gemini/Image-chat-with-gemini.md deleted file mode 100644 index 0efd6e8e80..0000000000 --- a/dotnet/website/articles/AutoGen.Gemini/Image-chat-with-gemini.md +++ /dev/null @@ -1,25 +0,0 @@ -This example shows how to use @AutoGen.Gemini.GeminiChatAgent for image chat with Gemini model. - -To run this example, you need to have a project on Google Cloud with access to Vertex AI API. For more information please refer to [Google Vertex AI](https://cloud.google.com/vertex-ai/docs). - - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs) - -### Step 1: Install AutoGen.Gemini - -First, install the AutoGen.Gemini package using the following command: - -```bash -dotnet add package AutoGen.Gemini -``` - -### Step 2: Add using statement -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs?name=Using)] - -### Step 3: Create a Gemini agent - -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs?name=Create_Gemini_Agent)] - -### Step 4: Send image to Gemini -[!code-csharp[](../../../sample/AutoGen.Gemini.Sample/Image_Chat_With_Vertex_Gemini.cs?name=Send_Image_Request)] diff --git a/dotnet/website/articles/AutoGen.Gemini/Overview.md b/dotnet/website/articles/AutoGen.Gemini/Overview.md deleted file mode 100644 index 9c3e912e89..0000000000 --- a/dotnet/website/articles/AutoGen.Gemini/Overview.md +++ /dev/null @@ -1,12 +0,0 @@ -# AutoGen.Gemini Overview - -AutoGen.Gemini is a package that provides seamless integration with Google Gemini. It provides the following agent: - -- @AutoGen.Gemini.GeminiChatAgent: The agent that connects to Google Gemini or Vertex AI Gemini. 
It supports chat, multi-modal chat, and function call. - -AutoGen.Gemini also provides the following middleware: -- @AutoGen.Gemini.GeminiMessageConnector: The middleware that converts the Gemini message to AutoGen built-in message type. - -## Examples - -You can find more examples under the [gemini sample project](https://github.com/ag2ai/ag2/tree/main/dotnet/sample/AutoGen.Gemini.Sample) \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.Ollama/Chat-with-llama.md b/dotnet/website/articles/AutoGen.Ollama/Chat-with-llama.md deleted file mode 100644 index 0c98ef66f1..0000000000 --- a/dotnet/website/articles/AutoGen.Ollama/Chat-with-llama.md +++ /dev/null @@ -1,27 +0,0 @@ -This example shows how to use @AutoGen.Ollama.OllamaAgent to connect to Ollama server and chat with LLaVA model. - -To run this example, you need to have an Ollama server running aside and have `llama3:latest` model installed. For how to setup an Ollama server, please refer to [Ollama](https://ollama.com/). - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs) - -### Step 1: Install AutoGen.Ollama - -First, install the AutoGen.Ollama package using the following command: - -```bash -dotnet add package AutoGen.Ollama -``` - -For how to install from nightly build, please refer to [Installation](../Installation.md). - -### Step 2: Add using statement - -[!code-csharp[](../../../sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs?name=Using)] - -### Step 3: Create and chat @AutoGen.Ollama.OllamaAgent - -In this step, we create an @AutoGen.Ollama.OllamaAgent and connect it to the Ollama server. - -[!code-csharp[](../../../sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs?name=Create_Ollama_Agent)] - diff --git a/dotnet/website/articles/AutoGen.Ollama/Chat-with-llava.md b/dotnet/website/articles/AutoGen.Ollama/Chat-with-llava.md deleted file mode 100644 index 2423ebcfa6..0000000000 --- a/dotnet/website/articles/AutoGen.Ollama/Chat-with-llava.md +++ /dev/null @@ -1,29 +0,0 @@ -This sample shows how to use @AutoGen.Ollama.OllamaAgent to chat with LLaVA model. - -To run this example, you need to have an Ollama server running aside and have `llava:latest` model installed. For how to setup an Ollama server, please refer to [Ollama](https://ollama.com/). - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs) - -### Step 1: Install AutoGen.Ollama - -First, install the AutoGen.Ollama package using the following command: - -```bash -dotnet add package AutoGen.Ollama -``` - -For how to install from nightly build, please refer to [Installation](../Installation.md). - -### Step 2: Add using statement - -[!code-csharp[](../../../sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs?name=Using)] - -### Step 3: Create @AutoGen.Ollama.OllamaAgent - -[!code-csharp[](../../../sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs?name=Create_Ollama_Agent)] - -### Step 4: Start MultiModal Chat -LLaVA is a multimodal model that supports both text and image inputs. In this step, we create an image message along with a question about the image. 
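Conceptually, the message is assembled like this. This is a sketch that assumes `ollamaAgent` is the agent created in Step 3 and that `image.png` exists locally; the snippet below shows the exact sample code.

```csharp
using AutoGen.Core;

// Load the image and wrap it in an ImageMessage.
using var fs = new FileStream("image.png", FileMode.Open, FileAccess.Read);
var imageData = await BinaryData.FromStreamAsync(fs, "image/png");
var imageMessage = new ImageMessage(Role.User, imageData, from: "user");

// Combine the image with a text question into a single multi-modal message.
var textMessage = new TextMessage(Role.User, "What do you see in this image?", from: "user");
var multiModalMessage = new MultiModalMessage(Role.User, [imageMessage, textMessage], from: "user");

// Assumes `ollamaAgent` was created in Step 3.
var reply = await ollamaAgent.SendAsync(chatHistory: new[] { multiModalMessage });
Console.WriteLine(reply.GetContent());
```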
- -[!code-csharp[](../../../sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs?name=Send_Message)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.SemanticKernel/AutoGen-SemanticKernel-Overview.md b/dotnet/website/articles/AutoGen.SemanticKernel/AutoGen-SemanticKernel-Overview.md deleted file mode 100644 index d28c762f51..0000000000 --- a/dotnet/website/articles/AutoGen.SemanticKernel/AutoGen-SemanticKernel-Overview.md +++ /dev/null @@ -1,19 +0,0 @@ -## AutoGen.SemanticKernel Overview - -AutoGen.SemanticKernel is a package that provides seamless integration with Semantic Kernel. It provides the following agents: -- @AutoGen.SemanticKernel.SemanticKernelAgent: A slim wrapper agent over `Kernel` that only support original `ChatMessageContent` type via `IMessage`. To support more AutoGen built-in message type, register the agent with @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector. -- @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent: A slim wrapper agent over `Microsoft.SemanticKernel.Agents.ChatCompletionAgent`. - -AutoGen.SemanticKernel also provides the following middleware: -- @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector: A connector that convert the message from AutoGen built-in message types to `ChatMessageContent` and vice versa. At the current stage, it only supports conversation between @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage and @AutoGen.Core.MultiModalMessage. Function call message type like @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage are not supported yet. -- @AutoGen.SemanticKernel.KernelPluginMiddleware: A middleware that allows you to use semantic kernel plugins in other AutoGen agents like @AutoGen.OpenAI.OpenAIChatAgent. - -### Get start with AutoGen.SemanticKernel - -To get start with AutoGen.SemanticKernel, firstly, follow the [installation guide](../Installation.md) to make sure you add the AutoGen feed correctly. Then add `AutoGen.SemanticKernel` package to your project file. - -```xml - - - -``` \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md b/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md deleted file mode 100644 index 728cb7a56d..0000000000 --- a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md +++ /dev/null @@ -1,9 +0,0 @@ -You can chat with @AutoGen.SemanticKernel.SemanticKernelAgent using both streaming and non-streaming methods and use native `ChatMessageContent` type via `IMessage`. - -The following example shows how to create an @AutoGen.SemanticKernel.SemanticKernelAgent and chat with it using non-streaming method: - -[!code-csharp[](../../../sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs?name=create_semantic_kernel_agent)] - -@AutoGen.SemanticKernel.SemanticKernelAgent also supports streaming chat via @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync*. 
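The streaming loop itself looks roughly like the sketch below. It assumes `skAgent` is the agent created above and that it has been registered with @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector so it understands @AutoGen.Core.TextMessage; the linked snippet is the authoritative version.

```csharp
using AutoGen.Core;

var question = new TextMessage(Role.User, "Tell me a short story about a kernel.");

TextMessage? finalReply = null;
await foreach (var update in skAgent.GenerateStreamingReplyAsync(new[] { question }))
{
    if (update is TextMessageUpdate textUpdate)
    {
        // Assemble the streamed deltas into a full TextMessage.
        if (finalReply is null)
        {
            finalReply = new TextMessage(textUpdate);
        }
        else
        {
            finalReply.Update(textUpdate);
        }
    }
    else if (update is TextMessage textMessage)
    {
        // Some agents return the fully assembled message as the last item.
        finalReply = textMessage;
    }
}

Console.WriteLine(finalReply?.GetContent());
```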
- -[!code-csharp[](../../../sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs?name=create_semantic_kernel_agent_streaming)] diff --git a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-support-more-messages.md b/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-support-more-messages.md deleted file mode 100644 index 139b6efa65..0000000000 --- a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelAgent-support-more-messages.md +++ /dev/null @@ -1,10 +0,0 @@ -@AutoGen.SemanticKernel.SemanticKernelAgent only supports the original `ChatMessageContent` type via `IMessage`. To support more AutoGen built-in message types like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, @AutoGen.Core.MultiModalMessage, you can register the agent with @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector. The @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector will convert the message from AutoGen built-in message types to `ChatMessageContent` and vice versa. -> [!NOTE] -> At the current stage, @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector only supports conversation for the followng built-in @AutoGen.Core.IMessage -> - @AutoGen.Core.TextMessage -> - @AutoGen.Core.ImageMessage -> - @AutoGen.Core.MultiModalMessage -> -> Function call message type like @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage are not supported yet. - -[!code-csharp[](../../../sample/AutoGen.BasicSamples/CodeSnippet/SemanticKernelCodeSnippet.cs?name=register_semantic_kernel_chat_message_content_connector)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelChatAgent-simple-chat.md b/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelChatAgent-simple-chat.md deleted file mode 100644 index fd16e6f34a..0000000000 --- a/dotnet/website/articles/AutoGen.SemanticKernel/SemanticKernelChatAgent-simple-chat.md +++ /dev/null @@ -1,22 +0,0 @@ -`AutoGen.SemanticKernel` provides built-in support for `ChatCompletionAgent` via @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent. By default the @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent only supports the original `ChatMessageContent` type via `IMessage`. To support more AutoGen built-in message types like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, @AutoGen.Core.MultiModalMessage, you can register the agent with @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector. The @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector will convert the message from AutoGen built-in message types to `ChatMessageContent` and vice versa. - -The following step-by-step example shows how to create an @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent and chat with it: - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs). 
- -### Step 1: add using statement -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs?name=Using)] - -### Step 2: create kernel -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs?name=Create_Kernel)] - -### Step 3: create ChatCompletionAgent -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs?name=Create_ChatCompletionAgent)] - -### Step 4: create @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent -In this step, we create an @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent and register it with @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector. The @AutoGen.SemanticKernel.SemanticKernelChatMessageContentConnector will convert the message from AutoGen built-in message types to `ChatMessageContent` and vice versa. -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs?name=Create_SemanticKernelChatCompletionAgent)] - -### Step 5: chat with @AutoGen.SemanticKernel.SemanticKernelChatCompletionAgent -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Create_Semantic_Kernel_Chat_Agent.cs?name=Send_Message)] \ No newline at end of file diff --git a/dotnet/website/articles/AutoGen.SemanticKernel/Use-kernel-plugin-in-other-agents.md b/dotnet/website/articles/AutoGen.SemanticKernel/Use-kernel-plugin-in-other-agents.md deleted file mode 100644 index 9a646feac8..0000000000 --- a/dotnet/website/articles/AutoGen.SemanticKernel/Use-kernel-plugin-in-other-agents.md +++ /dev/null @@ -1,27 +0,0 @@ -In semantic kernel, a kernel plugin is a collection of kernel functions that can be invoked during LLM calls. Semantic kernel provides a list of built-in plugins, like [core plugins](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Plugins/Plugins.Core), [web search plugin](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Plugins/Plugins.Web) and many more. You can also create your own plugins and use them in semantic kernel. Kernel plugins greatly extend the capabilities of semantic kernel and can be used to perform various tasks like web search, image search, text summarization, etc. - -`AutoGen.SemanticKernel` provides a middleware called @AutoGen.SemanticKernel.KernelPluginMiddleware that allows you to use semantic kernel plugins in other AutoGen agents like @AutoGen.OpenAI.OpenAIChatAgent. The following example shows how to define a simple plugin with a single `GetWeather` function and use it in @AutoGen.OpenAI.OpenAIChatAgent. - -> [!NOTE] -> You can find the complete sample code [here](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs) - -### Step 1: add using statement -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs?name=Using)] - -### Step 2: create plugin - -In this step, we create a simple plugin with a single `GetWeather` function that takes a location as input and returns the weather information for that location. - -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs?name=Create_plugin)] - -### Step 3: create OpenAIChatAgent and use the plugin - -In this step, we firstly create a @AutoGen.SemanticKernel.KernelPluginMiddleware and register the previous plugin with it. 
The `KernelPluginMiddleware` will load the plugin and make the functions available for use in other agents. Followed by creating an @AutoGen.OpenAI.OpenAIChatAgent and register it with the `KernelPluginMiddleware`. - -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs?name=Use_plugin)] - -### Step 4: chat with OpenAIChatAgent - -In this final step, we start the chat with the @AutoGen.OpenAI.OpenAIChatAgent by asking the weather in Seattle. The `OpenAIChatAgent` will use the `GetWeather` function from the plugin to get the weather information for Seattle. - -[!code-csharp[](../../../sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs?name=Send_message)] \ No newline at end of file diff --git a/dotnet/website/articles/Built-in-messages.md b/dotnet/website/articles/Built-in-messages.md deleted file mode 100644 index 4f30a07904..0000000000 --- a/dotnet/website/articles/Built-in-messages.md +++ /dev/null @@ -1,37 +0,0 @@ -## An overview of built-in @AutoGen.Core.IMessage types - -Start from 0.0.9, AutoGen introduces the @AutoGen.Core.IMessage and @AutoGen.Core.IMessage`1 types to provide a unified message interface for different agents. The @AutoGen.Core.IMessage is a non-generic interface that represents a message. The @AutoGen.Core.IMessage`1 is a generic interface that represents a message with a specific `T` where `T` can be any type. - -Besides, AutoGen also provides a set of built-in message types that implement the @AutoGen.Core.IMessage and @AutoGen.Core.IMessage`1 interfaces. These built-in message types are designed to cover different types of messages as much as possible. The built-in message types include: - -> [!NOTE] -> The minimal requirement for an agent to be used as admin in @AutoGen.Core.GroupChat is to support @AutoGen.Core.TextMessage. - -> [!NOTE] -> @AutoGen.Core.Message will be deprecated in 0.0.14. Please replace it with a more specific message type like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, etc. - -- @AutoGen.Core.TextMessage: A message that contains a piece of text. -- @AutoGen.Core.ImageMessage: A message that contains an image. -- @AutoGen.Core.MultiModalMessage: A message that contains multiple modalities like text, image, etc. -- @AutoGen.Core.ToolCallMessage: A message that represents a function call request. -- @AutoGen.Core.ToolCallResultMessage: A message that represents a function call result. -- @AutoGen.Core.ToolCallAggregateMessage: A message that contains both @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage. This type of message is used by @AutoGen.Core.FunctionCallMiddleware to aggregate both @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage into a single message. -- @AutoGen.Core.MessageEnvelope`1: A message that represents an envelope that contains a message of any type. -- @AutoGen.Core.Message: The original message type before 0.0.9. This message type is reserved for backward compatibility. It is recommended to replace it with a more specific message type like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, etc. - -### Streaming message support -AutoGen also introduces @AutoGen.Core.IStreamingMessage and @AutoGen.Core.IStreamingMessage`1 which are used in streaming call api. The following built-in message types implement the @AutoGen.Core.IStreamingMessage and @AutoGen.Core.IStreamingMessage`1 interfaces: - -> [!NOTE] -> All @AutoGen.Core.IMessage is also a @AutoGen.Core.IStreamingMessage. 
That means you can return an @AutoGen.Core.IMessage from a streaming call method. It's also recommended to return the final updated result instead of the last update as the last message in the streaming call method to indicate the end of the stream, which saves caller's effort of assembling the final result from multiple updates. -- @AutoGen.Core.TextMessageUpdate: A message that contains a piece of text update. -- @AutoGen.Core.ToolCallMessageUpdate: A message that contains a function call request update. - -#### Usage - -The below code snippet shows how to print a streaming update to console and update the final result on the caller side. -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs?name=StreamingCallCodeSnippet)] - -If the agent returns a final result instead of the last update as the last message in the streaming call method, the caller can directly use the final result without assembling the final result from multiple updates. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/BuildInMessageCodeSnippet.cs?name=StreamingCallWithFinalMessage)] diff --git a/dotnet/website/articles/Consume-LLM-server-from-LM-Studio.md b/dotnet/website/articles/Consume-LLM-server-from-LM-Studio.md deleted file mode 100644 index dff384a267..0000000000 --- a/dotnet/website/articles/Consume-LLM-server-from-LM-Studio.md +++ /dev/null @@ -1,20 +0,0 @@ -## Consume LLM server from LM Studio -You can use @AutoGen.LMStudio.LMStudioAgent from `AutoGen.LMStudio` package to consume openai-like API from LMStudio local server. - -### What's LM Studio -[LM Studio](https://lmstudio.ai/) is an app that allows you to deploy and inference hundreds of thousands of open-source language model on your local machine. It provides an in-app chat ui plus an openai-like API to interact with the language model programmatically. - -### Installation -- Install LM studio if you haven't done so. You can find the installation guide [here](https://lmstudio.ai/) -- Add `AutoGen.LMStudio` to your project. -```xml - - - -``` - -### Usage -The following code shows how to use `LMStudioAgent` to write a piece of C# code to calculate 100th of fibonacci. Before running the code, make sure you have local server from LM Studio running on `localhost:1234`. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_using_statements)] -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_example_1)] diff --git a/dotnet/website/articles/Create-a-user-proxy-agent.md b/dotnet/website/articles/Create-a-user-proxy-agent.md deleted file mode 100644 index 44441ed349..0000000000 --- a/dotnet/website/articles/Create-a-user-proxy-agent.md +++ /dev/null @@ -1,16 +0,0 @@ -## UserProxyAgent - -[`UserProxyAgent`](../api/AutoGen.UserProxyAgent.yml) is a special type of agent that can be used to proxy user input to another agent or group of agents. It supports the following human input modes: -- `ALWAYS`: Always ask user for input. -- `NEVER`: Never ask user for input. In this mode, the agent will use the default response (if any) to respond to the message. Or using underlying LLM model to generate response if provided. -- `AUTO`: Only ask user for input when conversation is terminated by the other agent(s). Otherwise, use the default response (if any) to respond to the message. Or using underlying LLM model to generate response if provided. 
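As a quick illustration of the `ALWAYS` mode (anticipating the snippet shown later in this article), a user proxy can be created roughly as follows. The `humanInputMode` parameter name follows the tip below; the namespace and default values are otherwise assumptions.

```csharp
using AutoGen;      // UserProxyAgent (assumption: lives in the main AutoGen package)
using AutoGen.Core;

// ALWAYS: every time the proxy is asked to reply, it reads the reply from the console.
var userProxyAgent = new UserProxyAgent(
    name: "user",
    humanInputMode: HumanInputMode.ALWAYS);

// Whatever the human types becomes the agent's reply.
var reply = await userProxyAgent.SendAsync("Hi, what would you like to do today?");
```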
- -> [!TIP] -> You can also set up `humanInputMode` when creating `AssistantAgent` to enable/disable human input. `UserProxyAgent` is equivalent to `AssistantAgent` with `humanInputMode` set to `ALWAYS`. Similarly, `AssistantAgent` is equivalent to `UserProxyAgent` with `humanInputMode` set to `NEVER`. - -### Create a `UserProxyAgent` with `HumanInputMode` set to `ALWAYS` - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/UserProxyAgentCodeSnippet.cs?name=code_snippet_1)] - -When running the code, the user proxy agent will ask user for input and use the input as response. -![code output](../images/articles/CreateUserProxyAgent/image-1.png) \ No newline at end of file diff --git a/dotnet/website/articles/Create-an-agent.md b/dotnet/website/articles/Create-an-agent.md deleted file mode 100644 index 1b56666daa..0000000000 --- a/dotnet/website/articles/Create-an-agent.md +++ /dev/null @@ -1,11 +0,0 @@ -## AssistantAgent - -[`AssistantAgent`](../api/AutoGen.AssistantAgent.yml) is a built-in agent in `AutoGen` that acts as an AI assistant. It uses LLM to generate response to user input. It also supports function call if the underlying LLM model supports it (e.g. `gpt-3.5-turbo-0613`). - -## Create an `AssistantAgent` using OpenAI model. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs?name=code_snippet_1)] - -## Create an `AssistantAgent` using Azure OpenAI model. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs?name=code_snippet_2)] diff --git a/dotnet/website/articles/Create-type-safe-function-call.md b/dotnet/website/articles/Create-type-safe-function-call.md deleted file mode 100644 index 82bc5e8440..0000000000 --- a/dotnet/website/articles/Create-type-safe-function-call.md +++ /dev/null @@ -1,41 +0,0 @@ -## Type-safe function call - -`AutoGen` provides a source generator to easness the trouble of manually craft function definition and function call wrapper from a function. To use this feature, simply add the `AutoGen.SourceGenerator` package to your project and decorate your function with @AutoGen.Core.FunctionAttribute. - -```bash -dotnet add package AutoGen.SourceGenerator -``` - -> [!NOTE] -> It's recommended to enable structural xml document support by setting `GenerateDocumentationFile` property to true in your project file. This allows source generator to leverage the documentation of the function when generating the function definition. - -```xml - - - true - -``` - -Then, create a `public partial` class to host the methods you want to use in AutoGen agents. The method has to be a `public` instance method and its return type must be `Task`. After the methods is defined, mark them with @AutoGen.FunctionAttribute attribute: - -> [!NOTE] -> A `public partial` class is required for the source generator to generate code. -> The method has to be a `public` instance method and its return type must be `Task`. -> Mark the method with @AutoGen.Core.FunctionAttribute attribute. 
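Putting those requirements together, a qualifying function looks like the following illustrative stub (the `WeatherReport` walkthrough below is the real sample):

```csharp
using System.Threading.Tasks;
using AutoGen.Core;

// Must be `public partial` so the source generator can emit the companion
// FunctionContract and call wrapper in another partial declaration.
public partial class EchoFunctions
{
    /// <summary>
    /// Echo the given message back to the caller.
    /// </summary>
    /// <param name="message">message to echo</param>
    [Function]
    public async Task<string> EchoAsync(string message)
    {
        return $"[ECHO] {message}";
    }
}
```

For a method named `EchoAsync`, the generated members follow the `EchoAsyncFunctionContract` / `EchoAsyncWrapper` naming convention used throughout the samples and tests.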
- -Firstly, import the required namespaces: - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs?name=weather_report_using_statement)] - -Then, create a `WeatherReport` function and mark it with @AutoGen.Core.FunctionAttribute: - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs?name=weather_report)] - -The source generator will generate the @AutoGen.Core.FunctionContract and function call wrapper for `WeatherReport` in another partial class based on its signature and structural comments. The @AutoGen.Core.FunctionContract is introduced by [#1736](https://github.com/microsoft/autogen/pull/1736) and contains all the necessary metadata such as function name, parameters, and return type. It is LLM independent and can be used to generate openai function definition or semantic kernel function. The function call wrapper is a helper class that provides a type-safe way to call the function. - -> [!NOTE] -> If you are using VSCode as your editor, you may need to restart the editor to see the generated code. - -The following code shows how to generate openai function definition from the @AutoGen.Core.FunctionContract and call the function using the function call wrapper. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs?name=weather_report_consume)] diff --git a/dotnet/website/articles/Create-your-own-agent.md b/dotnet/website/articles/Create-your-own-agent.md deleted file mode 100644 index a4548817c7..0000000000 --- a/dotnet/website/articles/Create-your-own-agent.md +++ /dev/null @@ -1 +0,0 @@ -## Coming soon \ No newline at end of file diff --git a/dotnet/website/articles/Create-your-own-middleware.md b/dotnet/website/articles/Create-your-own-middleware.md deleted file mode 100644 index a4548817c7..0000000000 --- a/dotnet/website/articles/Create-your-own-middleware.md +++ /dev/null @@ -1 +0,0 @@ -## Coming soon \ No newline at end of file diff --git a/dotnet/website/articles/Function-call-middleware.md b/dotnet/website/articles/Function-call-middleware.md deleted file mode 100644 index 12c3c04153..0000000000 --- a/dotnet/website/articles/Function-call-middleware.md +++ /dev/null @@ -1 +0,0 @@ -# Coming soon \ No newline at end of file diff --git a/dotnet/website/articles/Function-call-overview.md b/dotnet/website/articles/Function-call-overview.md deleted file mode 100644 index e8dfc54cd7..0000000000 --- a/dotnet/website/articles/Function-call-overview.md +++ /dev/null @@ -1,52 +0,0 @@ -## Overview of function call - -In some LLM models, you can provide a list of function definitions to the model. The function definition is usually essentially an OpenAPI schema object which describes the function, its parameters and return value. And these function definitions tells the model what "functions" are available to be used to resolve the user's request. This feature greatly extend the capability of LLM models by enabling them to "execute" arbitrary function as long as it can be described as a function definition. - -Below is an example of a function definition for getting weather report for a city: - -> [!NOTE] -> To use function call, the underlying LLM model must support function call as well for the best experience. -> The model used in the example below is `gpt-3.5-turbo-0613`. 
-```json -{ -  "name": "GetWeather", -  "description": "Get the weather report for a city", -  "parameters": { -    "type": "object", -    "properties": { -      "city": { -        "type": "string", -        "description": "The city name" -      } -    }, -    "required": ["city"] -  } -} -``` - - - -When the model receives a message, it will intelligently decide whether to use function call or not based on the message received. If the model decides to use function call, it will generate a function call which can be used to invoke the actual function. The function call is a JSON object which contains the function name and its arguments. - -Below is an example of a function call object for getting the weather report for Seattle: - -```json -{ - "name": "GetWeather", - "arguments": { - "city": "Seattle" - } -} -``` - -When the function call is returned to the caller, it can be used to invoke the actual function to get the weather report for Seattle. - -### Create a type-safe function contract and function call wrapper using AutoGen.SourceGenerator -AutoGen provides a source generator that eases the burden of manually crafting the function contract and function call wrapper for a function. To use this feature, simply add the `AutoGen.SourceGenerator` package to your project and decorate your function with the `Function` attribute. - -For more information, please check out [Create type-safe function](Create-type-safe-function-call.md). - -### Use function call in an agent -AutoGen provides first-class support for function call in its agent story. Usually there are three ways to enable a function call in an agent. -- Pass function definitions when creating an agent. This only works if the agent supports passing function definitions through its constructor. -- Pass function definitions in @AutoGen.Core.GenerateReplyOptions when invoking an agent. -- Register an agent with @AutoGen.Core.FunctionCallMiddleware to process and invoke function calls. - -For more information, please check out [Use function call in an agent](Use-function-call.md). \ No newline at end of file diff --git a/dotnet/website/articles/Function-call-with-ollama-and-litellm.md b/dotnet/website/articles/Function-call-with-ollama-and-litellm.md deleted file mode 100644 index 7dc7fe3d0d..0000000000 --- a/dotnet/website/articles/Function-call-with-ollama-and-litellm.md +++ /dev/null @@ -1,93 +0,0 @@ -This example shows how to use function calls with local LLM models, using [Ollama](https://ollama.com/) as the local model provider and a [LiteLLM](https://docs.litellm.ai/docs/) proxy server to expose an OpenAI-API compatible interface. - -[![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs) - -To run this example, the following prerequisites are required: -- Install [Ollama](https://ollama.com/) and [LiteLLM](https://docs.litellm.ai/docs/) on your local machine. -- A local model that supports function calls. In this example `dolphincoder:latest` is used. - -## Install Ollama and pull the `dolphincoder:latest` model -First, install Ollama by following the instructions on the [Ollama website](https://ollama.com/). - -After installing Ollama, pull the `dolphincoder:latest` model by running the following command: -```bash -ollama pull dolphincoder:latest -``` - -## Install LiteLLM and start the proxy server - -You can install LiteLLM by following the instructions on the [LiteLLM website](https://docs.litellm.ai/docs/).
-```bash -pip install 'litellm[proxy]' -``` - -Then, start the proxy server by running the following command: - -```bash -litellm --model ollama_chat/dolphincoder --port 4000 -``` - -This will start an openai-api compatible proxy server at `http://localhost:4000`. You can verify if the server is running by observing the following output in the terminal: - -```bash -#------------------------------------------------------------# -# # -# 'The worst thing about this product is...' # -# https://github.com/BerriAI/litellm/issues/new # -# # -#------------------------------------------------------------# - -INFO: Application startup complete. -INFO: Uvicorn running on http://0.0.0.0:4000 (Press CTRL+C to quit) -``` - -## Install AutoGen and AutoGen.SourceGenerator -In your project, install the AutoGen and AutoGen.SourceGenerator package using the following command: - -```bash -dotnet add package AutoGen -dotnet add package AutoGen.SourceGenerator -``` - -The `AutoGen.SourceGenerator` package is used to automatically generate type-safe `FunctionContract` instead of manually defining them. For more information, please check out [Create type-safe function](Create-type-safe-function-call.md). - -And in your project file, enable structural xml document support by setting the `GenerateDocumentationFile` property to `true`: - -```xml - - - true - -``` - -## Define `WeatherReport` function and create @AutoGen.Core.FunctionCallMiddleware - -Create a `public partial` class to host the methods you want to use in AutoGen agents. The method has to be a `public` instance method and its return type must be `Task`. After the methods are defined, mark them with `AutoGen.Core.FunctionAttribute` attribute. - -[!code-csharp[Define WeatherReport function](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Function)] - -Then create a @AutoGen.Core.FunctionCallMiddleware and add the `WeatherReport` function to the middleware. The middleware will pass the `FunctionContract` to the agent when generating a response, and process the tool call response when receiving a `ToolCallMessage`. -[!code-csharp[Define WeatherReport function](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_tools)] - -## Create @AutoGen.OpenAI.OpenAIChatAgent with `GetWeatherReport` tool and chat with it - -Because LiteLLM proxy server is openai-api compatible, we can use @AutoGen.OpenAI.OpenAIChatAgent to connect to it as a third-party openai-api provider. The agent is also registered with a @AutoGen.Core.FunctionCallMiddleware which contains the `WeatherReport` tool. Therefore, the agent can call the `WeatherReport` tool when generating a response. - -[!code-csharp[Create an agent with tools](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_Agent)] - -The reply from the agent will similar to the following: -```bash -AggregateMessage from assistant --------------------- -ToolCallMessage: -ToolCallMessage from assistant --------------------- -- GetWeatherAsync: {"city": "new york"} --------------------- - -ToolCallResultMessage: -ToolCallResultMessage from assistant --------------------- -- GetWeatherAsync: The weather in new york is 72 degrees and sunny. 
--------------------- -``` \ No newline at end of file diff --git a/dotnet/website/articles/Group-chat-overview.md b/dotnet/website/articles/Group-chat-overview.md deleted file mode 100644 index 6e31aeb598..0000000000 --- a/dotnet/website/articles/Group-chat-overview.md +++ /dev/null @@ -1,8 +0,0 @@ -@AutoGen.Core.IGroupChat is a fundamental feature in AutoGen. It provides a way to organize multiple agents under the same context and have them work together to resolve a given task. - -In AutoGen, there are two types of group chat: -- @AutoGen.Core.RoundRobinGroupChat : This group chat runs agents in a round-robin sequence. The chat history plus the most recent reply from the previous agent will be passed to the next agent. -- @AutoGen.Core.GroupChat : This group chat provides a more dynamic yet controllable way to determine the next speaker agent. You can either use an LLM agent as the group admin, or use a @AutoGen.Core.Graph, which was introduced by [this PR](https://github.com/microsoft/autogen/pull/1761), or both to determine the next speaker agent. - -> [!NOTE] -> In @AutoGen.Core.GroupChat, when only the group admin is used to determine the next speaker agent, it's recommended to use a more powerful LLM model, such as `gpt-4`, to ensure the best experience. diff --git a/dotnet/website/articles/Group-chat.md b/dotnet/website/articles/Group-chat.md deleted file mode 100644 index 058f4f2521..0000000000 --- a/dotnet/website/articles/Group-chat.md +++ /dev/null @@ -1,73 +0,0 @@ -@AutoGen.Core.GroupChat invokes agents in a dynamic way. On one hand, it relies on its admin agent to intelligently determine the next speaker based on the conversation context; on the other hand, it also allows you to control the conversation flow by using a @AutoGen.Core.Graph. This makes it a more dynamic yet controllable way to determine the next speaker agent. You can use @AutoGen.Core.GroupChat to create a dynamic group chat with multiple agents working together to resolve a given task. - -> [!NOTE] -> In @AutoGen.Core.GroupChat, when only the group admin is used to determine the next speaker agent, it's recommended to use a more powerful LLM model, such as `gpt-4`, to ensure the best experience. - -## Use @AutoGen.Core.GroupChat to implement a code interpreter chat flow -The following example shows how to create a dynamic group chat with @AutoGen.Core.GroupChat. In this example, we will create a dynamic group chat with 4 agents: `admin`, `coder`, `reviewer` and `runner`. Each agent has its own role in the group chat: - -### Code interpreter group chat -- `admin`: creates the task for the group to work on and terminates the conversation when the task is completed. In this example, the task to resolve is to calculate the 39th Fibonacci number. -- `coder`: a dotnet coder who can write code to resolve tasks. -- `reviewer`: a dotnet code reviewer who can review code written by `coder`. In this example, `reviewer` will examine whether the code written by `coder` follows the conditions below: - - has only one csharp code block. - - uses top-level statements. - - is a dotnet code snippet. - - prints the result of the code snippet to the console. -- `runner`: a dotnet code runner who can run code written by `coder` and print the result.
- -```mermaid -flowchart LR - subgraph Group Chat - B[Admin] - C[Coder] - D[Reviewer] - E[Runner] - end -``` - -> [!NOTE] -> The complete code of this example can be found in `Example07_Dynamic_GroupChat_Calculate_Fibonacci` - -### Create group chat - -The code below shows how to create a dynamic group chat with @AutoGen.Core.GroupChat. In this example, we will create a dynamic group chat with 4 agents: `admin`, `coder`, `reviewer` and `runner`. In this case we don't pass a workflow to the group chat, so the group chat will be driven by the admin agent. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_group_chat)] - -> [!TIP] -> You can set up initial context for the group chat using @AutoGen.Core.GroupChatExtension.SendIntroduction*. The initial context can help the group admin orchestrate the conversation flow. - -Output: - -![GroupChat](../images/articles/DynamicGroupChat/dynamicChat.gif) - -### Below is a break-down of how the agents are created and their roles in the group chat. - -- Create admin agent - -The code below shows how to create the `admin` agent. The `admin` agent creates a task for the group to work on and terminates the conversation when the task is completed. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_admin)] - -- Create coder agent - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_coder)] - -- Create reviewer agent - -The code below shows how to create the `reviewer` agent. The `reviewer` agent is a dotnet code reviewer who can review code written by `coder`. In this example, a `function` is used to examine whether the code written by `coder` follows the conditions. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=reviewer_function)] - -> [!TIP] -> You can use @AutoGen.Core.FunctionAttribute to generate a type-safe function definition and function call wrapper for the function. For more information, please check out [Create type safe function call](./Create-type-safe-function-call.md). - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_reviewer)] - -- Create runner agent - -> [!TIP] -> `AutoGen` provides built-in support for running code snippets. For more information, please check out [Execute code snippet](./Run-dotnet-code.md). - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_runner)] diff --git a/dotnet/website/articles/Installation.md b/dotnet/website/articles/Installation.md deleted file mode 100644 index 30b55442d2..0000000000 --- a/dotnet/website/articles/Installation.md +++ /dev/null @@ -1,67 +0,0 @@ -### Current version: - -[![NuGet version](https://badge.fury.io/nu/AutoGen.Core.svg)](https://badge.fury.io/nu/AutoGen.Core) - -AutoGen.Net provides the following packages; you can choose to install one or more of them based on your needs: - -- `AutoGen`: The all-in-one package. This package has dependencies on `AutoGen.Core`, `AutoGen.OpenAI`, `AutoGen.LMStudio`, `AutoGen.SemanticKernel` and `AutoGen.SourceGenerator`. -- `AutoGen.Core`: The core package, this package provides the abstractions for message types, agents and group chat. -- `AutoGen.OpenAI`: This package provides the integration agents for OpenAI models. -- `AutoGen.Mistral`: This package provides the integration agents for Mistral.AI models.
-- `AutoGen.Ollama`: This package provides the integration agents for [Ollama](https://ollama.com/). -- `AutoGen.Anthropic`: This package provides the integration agents for [Anthropic](https://www.anthropic.com/api) -- `AutoGen.LMStudio`: This package provides the integration agents from LM Studio. -- `AutoGen.SemanticKernel`: This package provides the integration agents over semantic kernel. -- `AutoGen.Gemini`: This package provides the integration agents from [Google Gemini](https://gemini.google.com/). -- `AutoGen.AzureAIInference`: This package provides the integration agents for [Azure AI Inference](https://www.nuget.org/packages/Azure.AI.Inference). -- `AutoGen.SourceGenerator`: This package carries a source generator that adds support for type-safe function definition generation. -- `AutoGen.DotnetInteractive`: This packages carries dotnet interactive support to execute code snippets. The current supported language is C#, F#, powershell and python. - ->[!Note] -> Help me choose -> - If you just want to install one package and enjoy the core features of AutoGen, choose `AutoGen`. -> - If you want to leverage AutoGen's abstraction only and want to avoid introducing any other dependencies, like `Azure.AI.OpenAI` or `Semantic Kernel`, choose `AutoGen.Core`. You will need to implement your own agent, but you can still use AutoGen core features like group chat, built-in message type, workflow and middleware. ->- If you want to use AutoGen with openai, choose `AutoGen.OpenAI`, similarly, choose `AutoGen.LMStudio` or `AutoGen.SemanticKernel` if you want to use agents from LM Studio or semantic kernel. ->- If you just want the type-safe source generation for function call and don't want any other features, which even include the AutoGen's abstraction, choose `AutoGen.SourceGenerator`. - -Then, install the package using the following command: - -```bash -dotnet add package AUTOGEN_PACKAGES -``` - -### Consume nightly build -To consume nightly build, you can add one of the following feeds to your `NuGet.config` or global nuget config: -- ![Static Badge](https://img.shields.io/badge/public-blue?style=flat) ![Static Badge](https://img.shields.io/badge/github-grey?style=flat): https://nuget.pkg.github.com/microsoft/index.json -- ![Static Badge](https://img.shields.io/badge/public-blue?style=flat) ![Static Badge](https://img.shields.io/badge/myget-grey?style=flat): https://www.myget.org/F/agentchat/api/v3/index.json -- ![Static Badge](https://img.shields.io/badge/internal-blue?style=flat) ![Static Badge](https://img.shields.io/badge/azure_devops-grey?style=flat) : https://devdiv.pkgs.visualstudio.com/DevDiv/_packaging/AutoGen/nuget/v3/index.json - -To add a local `NuGet.config`, create a file named `NuGet.config` in the root of your project and add the following content: -```xml - - - - - - - - - - - -``` - -To add the feed to your global nuget config. 
You can do this by running the following command in your terminal: -```bash -dotnet nuget add source FEED_URL --name AutoGen - -# dotnet-tools contains Microsoft.DotNet.Interactive.VisualStudio package, which is used by AutoGen.DotnetInteractive -dotnet nuget add source https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json --name dotnet-tools -``` - -Once you have added the feed, you can install the nightly-build package using the following command: -```bash -dotnet add package AUTOGEN_PACKAGES VERSION -``` - - diff --git a/dotnet/website/articles/Middleware-overview.md b/dotnet/website/articles/Middleware-overview.md deleted file mode 100644 index 42355de33e..0000000000 --- a/dotnet/website/articles/Middleware-overview.md +++ /dev/null @@ -1,27 +0,0 @@ -`Middleware` is a key feature in AutoGen.Net that enables you to customize the behavior of @AutoGen.Core.IAgent.GenerateReplyAsync*. It's similar to the middleware concept in ASP.Net and is widely used in AutoGen.Net for various scenarios, such as function call support, converting message of different types, print message, gather user input, etc. - -Here are a few examples of how middleware is used in AutoGen.Net: -- @AutoGen.AssistantAgent is essentially an agent with @AutoGen.Core.FunctionCallMiddleware, @AutoGen.HumanInputMiddleware and default reply middleware. -- @AutoGen.OpenAI.GPTAgent is essentially an @AutoGen.OpenAI.OpenAIChatAgent with @AutoGen.Core.FunctionCallMiddleware and @AutoGen.OpenAI.OpenAIChatRequestMessageConnector. - -## Use middleware in an agent -To use middleware in an existing agent, you can either create a @AutoGen.Core.MiddlewareAgent on top of the original agent or register middleware functions to the original agent. - -### Create @AutoGen.Core.MiddlewareAgent on top of the original agent -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs?name=create_middleware_agent_with_original_agent)] - -### Register middleware functions to the original agent -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs?name=register_middleware_agent)] - -## Short-circuit the next agent -The example below shows how to short-circuit the inner agent - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs?name=short_circuit_middleware_agent)] - -> [!Note] -> When multiple middleware functions are registered, the order of middleware functions is first registered, last invoked. - -## Streaming middleware -You can also modify the behavior of @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync* by registering streaming middleware to it. One example is @AutoGen.OpenAI.OpenAIChatRequestMessageConnector which converts `StreamingChatCompletionsUpdate` to one of `AutoGen.Core.TextMessageUpdate` or `AutoGen.Core.ToolCallMessageUpdate`. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs?name=register_streaming_middleware)] \ No newline at end of file diff --git a/dotnet/website/articles/MistralChatAgent-count-token-usage.md b/dotnet/website/articles/MistralChatAgent-count-token-usage.md deleted file mode 100644 index 026a004f67..0000000000 --- a/dotnet/website/articles/MistralChatAgent-count-token-usage.md +++ /dev/null @@ -1,28 +0,0 @@ -The following example shows how to create a `MistralAITokenCounterMiddleware` @AutoGen.Core.IMiddleware and count the token usage when chatting with @AutoGen.Mistral.MistralClientAgent. 
- -### Overview -To collect the token usage for the entire chat session, one easy solution is to simply collect all the responses from the agent and sum up the token usage for each response. To collect all the agent responses, we can create a middleware which simply saves all responses to a list and register it with the agent. Because the example uses @AutoGen.Mistral.MistralClientAgent, the token usage information for each response can be read directly from the response object. - -> [!NOTE] -> You can find the complete example in the [Example14_MistralClientAgent_TokenCount](https://github.com/ag2ai/ag2/tree/main/dotnet/sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs). - -- Step 1: Add the using statements -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs?name=using_statements)] - -- Step 2: Create a `MistralAITokenCounterMiddleware` class which implements @AutoGen.Core.IMiddleware. This middleware will collect all the responses from the agent and sum up the token usage for each response. -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs?name=token_counter_middleware)] - -- Step 3: Create a `MistralClientAgent` -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs?name=create_mistral_client_agent)] - -- Step 4: Register the `MistralAITokenCounterMiddleware` with the `MistralClientAgent`. Note that the order in which middlewares are registered matters. The token counter middleware needs to be registered before `mistralMessageConnector` because it collects the response only when the responding message type is `IMessage` while the `mistralMessageConnector` will convert `IMessage` to one of @AutoGen.Core.TextMessage, @AutoGen.Core.ToolCallMessage or @AutoGen.Core.ToolCallResultMessage. -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs?name=register_middleware)] - -- Step 5: Chat with the `MistralClientAgent` and get the token usage information from the response object. -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example14_MistralClientAgent_TokenCount.cs?name=chat_with_agent)] - -### Output -When running the example, the completion token count will be printed to the console. -```bash -Completion token count: 1408 # might be different based on the response -``` \ No newline at end of file diff --git a/dotnet/website/articles/MistralChatAgent-use-function-call.md b/dotnet/website/articles/MistralChatAgent-use-function-call.md deleted file mode 100644 index 56ea0ffd08..0000000000 --- a/dotnet/website/articles/MistralChatAgent-use-function-call.md +++ /dev/null @@ -1,41 +0,0 @@ -## Use tool in MistralChatAgent - -The following example shows how to enable tool support in @AutoGen.Mistral.MistralClientAgent by creating a `GetWeatherAsync` function and passing it to the agent. - -Firstly, you need to install the following packages: -```bash -dotnet add package AutoGen.Mistral -dotnet add package AutoGen.SourceGenerator -``` - -> [!Note] -> Tool support is only available in some Mistral models. Please refer to the [link](https://docs.mistral.ai/capabilities/function_calling/#available-models) for tool call support in Mistral models. - -> [!Note] -> The `AutoGen.SourceGenerator` package carries a source generator that adds support for type-safe function definition generation. For more information, please check out [Create type-safe function](./Create-type-safe-function-call.md).
- -> [!NOTE] -> If you are using VSCode as your editor, you may need to restart the editor to see the generated code. - -Import the required namespace -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=using_statement)] - -Then define a public partial `MistralAgentFunction` class and `GetWeather` method. The `GetWeather` method is a simple function that returns the weather of a given location that marked with @AutoGen.Core.FunctionAttribute. Marking the class as `public partial` together with the @AutoGen.Core.FunctionAttribute attribute allows the source generator to generate the @AutoGen.Core.FunctionContract for the `GetWeather` method. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=weather_function)] - -Then create an @AutoGen.Mistral.MistralClientAgent and register it with @AutoGen.Mistral.Extension.MistralAgentExtension.RegisterMessageConnector* so it can support @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage. These message types are necessary to use @AutoGen.Core.FunctionCallMiddleware, which provides support for processing and invoking function calls. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=create_mistral_function_call_agent)] - -Then create an @AutoGen.Core.FunctionCallMiddleware with `GetWeather` function When creating the middleware, we also pass a `functionMap` object which means the function will be automatically invoked when the agent replies a `GetWeather` function call. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=create_get_weather_function_call_middleware)] - -After the function call middleware is created, register it with the agent so the `GetWeather` function will be passed to agent during chat completion. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=register_function_call_middleware)] - -Finally, you can chat with the @AutoGen.Mistral.MistralClientAgent about weather! The agent will automatically invoke the `GetWeather` function to "get" the weather information and return the result. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/MistralAICodeSnippet.cs?name=send_message_with_function_call)] \ No newline at end of file diff --git a/dotnet/website/articles/OpenAIChatAgent-connect-to-third-party-api.md b/dotnet/website/articles/OpenAIChatAgent-connect-to-third-party-api.md deleted file mode 100644 index f4c70564e3..0000000000 --- a/dotnet/website/articles/OpenAIChatAgent-connect-to-third-party-api.md +++ /dev/null @@ -1,49 +0,0 @@ -The following example shows how to connect to third-party OpenAI API using @AutoGen.OpenAI.OpenAIChatAgent. - -[![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs) - -## Overview -A lot of LLM applications/platforms support spinning up a chat server that is compatible with OpenAI API, such as LM Studio, Ollama, Mistral etc. This means that you can connect to these servers using the @AutoGen.OpenAI.OpenAIChatAgent. - -> [!NOTE] -> Some platforms might not support all the features of OpenAI API. For example, Ollama does not support `function call` when using it's openai API according to its [document](https://github.com/ollama/ollama/blob/main/docs/openai.md#v1chatcompletions) (as of 2024/05/07). 
-> That means some of the features of OpenAI API might not work as expected when using these platforms with the @AutoGen.OpenAI.OpenAIChatAgent. -> Please refer to the platform's documentation for more information. - -## Prerequisites -- Install the following packages: -```bash -dotnet add package AutoGen.OpenAI --version AUTOGEN_VERSION -``` - -- Spin up a chat server that is compatible with OpenAI API. -The following example uses Ollama as the chat server, and llama3 as the llm model. -```bash -ollama serve -``` - -## Steps -- Import the required namespaces: -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs?name=using_statement)] - -- Create a `CustomHttpClientHandler` class. - -The `CustomHttpClientHandler` class is used to customize the HttpClientHandler. In this example, we override the `SendAsync` method to redirect the request to local Ollama server, which is running on `http://localhost:11434`. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs?name=CustomHttpClientHandler)] - -- Create an `OpenAIChatAgent` instance and connect to the third-party API. - -Then create an @AutoGen.OpenAI.OpenAIChatAgent instance and connect to the OpenAI API from Ollama. You can customize the transport behavior of `OpenAIClient` by passing a customized `HttpClientTransport` instance. In the customized `HttpClientTransport` instance, we pass the `CustomHttpClientHandler` we just created which redirects all openai chat requests to the local Ollama server. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs?name=create_agent)] - -- Chat with the `OpenAIChatAgent`. -Finally, you can start chatting with the agent. In this example, we send a coding question to the agent and get the response. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs?name=send_message)] - -## Sample Output -The following is the sample output of the code snippet above: - -![output](../images/articles/ConnectTo3PartyOpenAI/output.gif) \ No newline at end of file diff --git a/dotnet/website/articles/OpenAIChatAgent-simple-chat.md b/dotnet/website/articles/OpenAIChatAgent-simple-chat.md deleted file mode 100644 index 867aff24af..0000000000 --- a/dotnet/website/articles/OpenAIChatAgent-simple-chat.md +++ /dev/null @@ -1,11 +0,0 @@ -The following example shows how to create an @AutoGen.OpenAI.OpenAIChatAgent and chat with it. - -Firsly, import the required namespaces: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=using_statement)] - -Then, create an @AutoGen.OpenAI.OpenAIChatAgent and chat with it: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=create_openai_chat_agent)] - -@AutoGen.OpenAI.OpenAIChatAgent also supports streaming chat via @AutoGen.Core.IAgent.GenerateStreamingReplyAsync*. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=create_openai_chat_agent_streaming)] \ No newline at end of file diff --git a/dotnet/website/articles/OpenAIChatAgent-support-more-messages.md b/dotnet/website/articles/OpenAIChatAgent-support-more-messages.md deleted file mode 100644 index af6e60682b..0000000000 --- a/dotnet/website/articles/OpenAIChatAgent-support-more-messages.md +++ /dev/null @@ -1,6 +0,0 @@ -By default, @AutoGen.OpenAI.OpenAIChatAgent only supports the @AutoGen.Core.IMessage type where `T` is original request or response message from `Azure.AI.OpenAI`. 
To support more AutoGen built-in message types like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, @AutoGen.Core.MultiModalMessage and so on, you can register the agent with @AutoGen.OpenAI.OpenAIChatRequestMessageConnector. The @AutoGen.OpenAI.OpenAIChatRequestMessageConnector will convert the message from AutoGen built-in message types to `Azure.AI.OpenAI.ChatRequestMessage` and vice versa. - -import the required namespaces: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=using_statement)] - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=register_openai_chat_message_connector)] \ No newline at end of file diff --git a/dotnet/website/articles/OpenAIChatAgent-use-function-call.md b/dotnet/website/articles/OpenAIChatAgent-use-function-call.md deleted file mode 100644 index da12ae9e90..0000000000 --- a/dotnet/website/articles/OpenAIChatAgent-use-function-call.md +++ /dev/null @@ -1,33 +0,0 @@ -The following example shows how to create a `GetWeatherAsync` function and pass it to @AutoGen.OpenAI.OpenAIChatAgent. - -Firstly, you need to install the following packages: -```xml - - - - -``` - -> [!Note] -> The `AutoGen.SourceGenerator` package carries a source generator that adds support for type-safe function definition generation. For more information, please check out [Create type-safe function](./Create-type-safe-function-call.md). - -> [!NOTE] -> If you are using VSCode as your editor, you may need to restart the editor to see the generated code. - -Firstly, import the required namespaces: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=using_statement)] - -Then, define a public partial class: `Function` with `GetWeather` method -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=weather_function)] - -Then, create an @AutoGen.OpenAI.OpenAIChatAgent and register it with @AutoGen.OpenAI.OpenAIChatRequestMessageConnector so it can support @AutoGen.Core.ToolCallMessage and @AutoGen.Core.ToolCallResultMessage. These message types are necessary to use @AutoGen.Core.FunctionCallMiddleware, which provides support for processing and invoking function calls. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=openai_chat_agent_get_weather_function_call)] - -Then, create an @AutoGen.Core.FunctionCallMiddleware with `GetWeather` function and register it with the agent above. When creating the middleware, we also pass a `functionMap` to @AutoGen.Core.FunctionCallMiddleware, which means the function will be automatically invoked when the agent replies a `GetWeather` function call. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=create_function_call_middleware)] - -Finally, you can chat with the @AutoGen.OpenAI.OpenAIChatAgent and invoke the `GetWeather` function. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs?name=chat_agent_send_function_call)] \ No newline at end of file diff --git a/dotnet/website/articles/OpenAIChatAgent-use-json-mode.md b/dotnet/website/articles/OpenAIChatAgent-use-json-mode.md deleted file mode 100644 index b476dfac8c..0000000000 --- a/dotnet/website/articles/OpenAIChatAgent-use-json-mode.md +++ /dev/null @@ -1,30 +0,0 @@ -The following example shows how to enable JSON mode in @AutoGen.OpenAI.OpenAIChatAgent. 
- -[![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs) - -## What is JSON mode? -JSON mode is a feature in OpenAI which allows you to instruct the model to always respond with a valid JSON object. This is useful when you want to constrain the model output to JSON format only. - -> [!NOTE] -> Currently, JSON mode is only supported by `gpt-4-turbo-preview` and `gpt-3.5-turbo-0125`. For more information (and limitations) about JSON mode, please visit [OpenAI API documentation](https://platform.openai.com/docs/guides/structured-outputs#json-mode). - -## How to enable JSON mode in OpenAIChatAgent - -To enable JSON mode for @AutoGen.OpenAI.OpenAIChatAgent, set `responseFormat` to `ChatCompletionsResponseFormat.JsonObject` when creating the agent. Note that when enabling JSON mode, you also need to instruct the agent to output JSON format in its system message. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs?name=create_agent)] - -After enabling JSON mode, the `openAIClientAgent` will always respond in JSON format when it receives a message. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs?name=chat_with_agent)] - -When running the example, the output from `openAIClientAgent` will be a valid JSON object which can be parsed into the `Person` class defined below. Note that in the output, the `address` field is missing because the address information is not provided in the user input. - -[!code-csharp[](../../sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs?name=person_class)] - -The output will be: -```bash -Name: John -Age: 25 -Done -``` \ No newline at end of file diff --git a/dotnet/website/articles/Print-message-middleware.md b/dotnet/website/articles/Print-message-middleware.md deleted file mode 100644 index b0115970d7..0000000000 --- a/dotnet/website/articles/Print-message-middleware.md +++ /dev/null @@ -1,27 +0,0 @@ -@AutoGen.Core.PrintMessageMiddleware is a built-in @AutoGen.Core.IMiddleware that pretty-prints @AutoGen.Core.IMessage to the console. - -> [!NOTE] -> @AutoGen.Core.PrintMessageMiddleware supports the following @AutoGen.Core.IMessage types: -> - @AutoGen.Core.TextMessage -> - @AutoGen.Core.MultiModalMessage -> - @AutoGen.Core.ToolCallMessage -> - @AutoGen.Core.ToolCallResultMessage -> - @AutoGen.Core.Message -> - (streaming) @AutoGen.Core.TextMessageUpdate -> - (streaming) @AutoGen.Core.ToolCallMessageUpdate - -## Use @AutoGen.Core.PrintMessageMiddleware in an agent -You can use @AutoGen.Core.PrintMessageMiddlewareExtension.RegisterPrintMessage* to register the @AutoGen.Core.PrintMessageMiddleware to an agent. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs?name=PrintMessageMiddleware)] - -@AutoGen.Core.PrintMessageMiddlewareExtension.RegisterPrintMessage* will format the message and print it to the console. -![image](../images/articles/PrintMessageMiddleware/printMessage.png) - -## Streaming message support - -@AutoGen.Core.PrintMessageMiddleware also supports streaming message types like @AutoGen.Core.TextMessageUpdate and @AutoGen.Core.ToolCallMessageUpdate. If you register @AutoGen.Core.PrintMessageMiddleware to a @AutoGen.Core.IStreamingAgent, it will format the streaming message and print it to the console if the message is of a supported type.
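For orientation, a rough sketch of that registration is shown below (the agent construction is elided and the streaming loop here is an assumption; the referenced snippet that follows is the actual sample):

```csharp
// assumes `openAIChatAgent` is an existing IStreamingAgent, e.g. an OpenAIChatAgent
// that has already been registered with a message connector
var agent = openAIChatAgent.RegisterPrintMessage();

var question = new TextMessage(Role.User, "Hello");
await foreach (var update in agent.GenerateStreamingReplyAsync(new[] { question }))
{
    // supported updates (e.g. TextMessageUpdate) are pretty-printed to the console
    // by the middleware as they arrive; no extra handling is needed here
}
```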
- -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs?name=print_message_streaming)] - -![image](../images/articles/PrintMessageMiddleware/streamingoutput.gif) diff --git a/dotnet/website/articles/Roundrobin-chat.md b/dotnet/website/articles/Roundrobin-chat.md deleted file mode 100644 index 0dcf064b07..0000000000 --- a/dotnet/website/articles/Roundrobin-chat.md +++ /dev/null @@ -1,33 +0,0 @@ -@AutoGen.Core.RoundRobinGroupChat is a group chat that invokes agents in a round-robin order. It's useful when you want to call multiple agents in a fixed sequence. For example, asking search agent to retrieve related information followed by a summarization agent to summarize the information. Beside, it also used by @AutoGen.Core.AgentExtension.SendAsync(AutoGen.Core.IAgent,AutoGen.Core.IAgent,System.String,System.Collections.Generic.IEnumerable{AutoGen.Core.IMessage},System.Int32,System.Threading.CancellationToken) in two agent chat. - -### Use @AutoGen.Core.RoundRobinGroupChat to implement a search-summarize chat flow - -```mermaid -flowchart LR - A[User] -->|Ask a question| B[Search Agent] - B -->|Retrieve information| C[Summarization Agent] - C -->|Summarize result| A[User] -``` - -> [!NOTE] -> Complete code can be found in [Example11_Sequential_GroupChat_Example](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs); - -Step 1: Add required using statements - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs?name=using_statement)] - -Step 2: Create a `bingSearch` agent using @AutoGen.SemanticKernel.SemanticKernelAgent - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs?name=CreateBingSearchAgent)] - -Step 3: Create a `summarization` agent using @AutoGen.SemanticKernel.SemanticKernelAgent - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs?name=CreateSummarizerAgent)] - -Step 4: Create a @AutoGen.Core.RoundRobinGroupChat and add `bingSearch` and `summarization` agents to it - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs?name=Sequential_GroupChat_Example)] - -Output: - -![Searcher-Summarizer](../images/articles/SequentialGroupChat/SearcherSummarizer.gif) diff --git a/dotnet/website/articles/Run-dotnet-code.md b/dotnet/website/articles/Run-dotnet-code.md deleted file mode 100644 index 82946f8593..0000000000 --- a/dotnet/website/articles/Run-dotnet-code.md +++ /dev/null @@ -1,61 +0,0 @@ -`AutoGen` provides a built-in feature to run code snippet from agent response. Currently the following languages are supported: -- dotnet - -More languages will be supported in the future. - -## What is a code snippet? -A code snippet in agent response is a code block with a language identifier. For example: - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs?name=code_snippet_1_3)] - -## Why running code snippet is useful? -The ability of running code snippet can greatly extend the ability of an agent. Because it enables agent to resolve tasks by writing code and run it, which is much more powerful than just returning a text response. - -For example, in data analysis scenario, agent can resolve tasks like "What is the average of the sales amount of the last 7 days?" 
by first writing a code snippet to query the sales amount of the last 7 days, then calculating the average, and then running the code snippet to get the result. - -> [!WARNING] -> Running arbitrary code snippets from agent responses could bring risks to your system. Use this feature with caution. - -## Use the dotnet-interactive kernel to execute code snippets -The built-in feature of running dotnet code snippets is provided by [dotnet-interactive](https://github.com/dotnet/interactive). To run a dotnet code snippet, you need to install the following package in your project, which provides the integration with dotnet-interactive: - -```xml -<PackageReference Include="AutoGen.DotnetInteractive" Version="AUTOGEN_VERSION" /> -``` - -Then you can use @AutoGen.DotnetInteractive.DotnetInteractiveKernelBuilder* to create an in-process dotnet-interactive composite kernel with C# and F# kernels. -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs?name=code_snippet_1_1)] - -After that, use the @AutoGen.DotnetInteractive.Extension.RunSubmitCodeCommandAsync* method to run a code snippet. The method will return the result of the code snippet. -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs?name=code_snippet_1_2)] - -## Run python code snippet -To run Python code, you first need to have Python installed on your machine, and then set up `ipykernel` and `jupyter` in your environment. - -```bash -pip install ipykernel -pip install jupyter -``` - -After `ipykernel` and `jupyter` are installed, you can confirm the ipykernel is installed correctly by running the following command: - -```bash -jupyter kernelspec list -``` - -The output should contain all available kernels, including `python3`. - -```bash -Available kernels: - python3 /usr/local/share/jupyter/kernels/python3 - ... -``` - -Then you can add the python kernel to the dotnet-interactive composite kernel by calling the `AddPythonKernel` method. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/RunCodeSnippetCodeSnippet.cs?name=code_snippet_1_4)] - -## Further reading -You can refer to the following examples for running code snippets in an agentic workflow: -- Dynamic_GroupChat_Coding_Task: [![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSample/Example04_Dynamic_GroupChat_Coding_Task.cs) -- Dynamic_GroupChat_Calculate_Fibonacci: [![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSample/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs) diff --git a/dotnet/website/articles/Two-agent-chat.md b/dotnet/website/articles/Two-agent-chat.md deleted file mode 100644 index 2fe5f8401e..0000000000 --- a/dotnet/website/articles/Two-agent-chat.md +++ /dev/null @@ -1,19 +0,0 @@ -In `AutoGen`, you can start a conversation between two agents using @AutoGen.Core.AgentExtension.InitiateChatAsync* or one of the @AutoGen.Core.AgentExtension.SendAsync* APIs. When the conversation starts, the sender agent first sends a message to the receiver agent, then the receiver agent generates a reply and sends it back to the sender agent. This process repeats until either one of the agents sends a termination message or the maximum number of turns is reached. - -> [!NOTE] -> A termination message is an @AutoGen.Core.IMessage whose content contains the keyword: @AutoGen.Core.GroupChatExtension.TERMINATE.
To determine if a message is a terminate message, you can use @AutoGen.Core.GroupChatExtension.IsGroupChatTerminateMessage*. - -## A basic example - -The following example shows how to start a conversation between the teacher agent and student agent, where the student agent starts the conversation by asking teacher to create math questions. - -> [!TIP] -> You can use @AutoGen.Core.PrintMessageMiddlewareExtension.RegisterPrintMessage* to pretty print the message replied by the agent. - -> [!NOTE] -> The conversation is terminated when teacher agent sends a message containing the keyword: @AutoGen.Core.GroupChatExtension.TERMINATE. - -> [!NOTE] -> The teacher agent uses @AutoGen.Core.MiddlewareExtension.RegisterPostProcess* to register a post process function which returns a hard-coded termination message when a certain condition is met. Comparing with putting the @AutoGen.Core.GroupChatExtension.TERMINATE keyword in the prompt, this approach is more robust especially when a weaker LLM model is used. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs?name=code_snippet_1)] diff --git a/dotnet/website/articles/Use-function-call.md b/dotnet/website/articles/Use-function-call.md deleted file mode 100644 index 8c0f172e7d..0000000000 --- a/dotnet/website/articles/Use-function-call.md +++ /dev/null @@ -1,43 +0,0 @@ -## Use function call in AutoGen agent - -Typically, there are three ways to pass a function definition to an agent to enable function call: -- Pass function definitions when creating an agent. This only works if the agent supports pass function call from its constructor. -- Passing function definitions in @AutoGen.Core.GenerateReplyOptions when invoking an agent -- Register an agent with @AutoGen.Core.FunctionCallMiddleware to process and invoke function calls. - -> [!NOTE] -> To use function call, the underlying LLM model must support function call as well for the best experience. If the model does not support function call, it's likely that the function call will be ignored and the model will reply with a normal response even if a function call is passed to it. - -## Pass function definitions when creating an agent -In some agents like @AutoGen.AssistantAgent or @AutoGen.OpenAI.GPTAgent, you can pass function definitions when creating the agent - -Suppose the `TypeSafeFunctionCall` is defined in the following code snippet: -[!code-csharp[TypeSafeFunctionCall](../../sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs?name=weather_report)] - -You can then pass the `WeatherReport` to the agent when creating it: -[!code-csharp[assistant agent](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=code_snippet_4)] - -## Passing function definitions in @AutoGen.Core.GenerateReplyOptions when invoking an agent -You can also pass function definitions in @AutoGen.Core.GenerateReplyOptions when invoking an agent. This is useful when you want to override the function definitions passed to the agent when creating it. - -[!code-csharp[assistant agent](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=overrider_function_contract)] - -## Register an agent with @AutoGen.Core.FunctionCallMiddleware to process and invoke function calls -You can also register an agent with @AutoGen.Core.FunctionCallMiddleware to process and invoke function calls. This is useful when you want to process and invoke function calls in a more flexible way. 
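Conceptually, that registration boils down to the sketch below (the `TypeSafeFunctionCall` instance and the generated `WeatherReportFunctionContract`/`WeatherReportWrapper` member names are assumed to come from the source generator as described above, and `agent` stands for any existing agent); the referenced snippet that follows shows the actual sample code:

```csharp
// create the middleware with the function contract (sent to the LLM) and a
// function map (used to invoke the function when a matching tool call comes back)
var functionInstance = new TypeSafeFunctionCall();
var functionCallMiddleware = new FunctionCallMiddleware(
    functions: new[] { functionInstance.WeatherReportFunctionContract },
    functionMap: new Dictionary<string, Func<string, Task<string>>>
    {
        [nameof(TypeSafeFunctionCall.WeatherReport)] = functionInstance.WeatherReportWrapper,
    });

// register the middleware so tool calls are processed and invoked automatically
var agentWithTools = agent.RegisterMiddleware(functionCallMiddleware);
```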
- -[!code-csharp[assistant agent](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=register_function_call_middleware)] - -## Invoke function call inside an agent -To invoke a function instead of returning the function call object, you can pass its function call wrapper to the agent via `functionMap`. - -You can then pass the `WeatherReportWrapper` to the agent via `functionMap`: -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=code_snippet_6)] - -When a function call object is returned, the agent will invoke the function and uses the return value as response rather than returning the function call object. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=code_snippet_6_1)] - -## Invoke function call by another agent -You can also use another agent to invoke the function call from one agent. This is a useful pattern in two-agent chat, where one agent is used as a function proxy to invoke the function call from another agent. Once the function call is invoked, the result can be returned to the original agent for further processing. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs?name=two_agent_weather_chat)] \ No newline at end of file diff --git a/dotnet/website/articles/Use-graph-in-group-chat.md b/dotnet/website/articles/Use-graph-in-group-chat.md deleted file mode 100644 index 1cc97e50fe..0000000000 --- a/dotnet/website/articles/Use-graph-in-group-chat.md +++ /dev/null @@ -1,25 +0,0 @@ -Sometimes, you may want to add more control on how the next agent is selected in a @AutoGen.Core.GroupChat based on the task you want to resolve. For example, in the previous [code writing example](./Group-chat.md), the original code interpreter workflow can be improved by the following diagram because it's not necessary for `admin` to directly talk to `reviewer`, nor it's necessary for `coder` to talk to `runner`. - -```mermaid -flowchart TD - A[Admin] -->|Ask coder to write code| B[Coder] - B -->|Ask Reviewer to review code| C[Reviewer] - C -->|Ask Runner to run code| D[Runner] - D -->|Send result if succeed| A[Admin] - D -->|Ask coder to fix if failed| B[Coder] - C -->|Ask coder to fix if not approved| B[Coder] -``` - -By having @AutoGen.Core.GroupChat to follow a specific graph flow, we can bring prior knowledge to group chat and make the conversation more efficient and robust. This is where @AutoGen.Core.Graph comes in. - -### Create a graph -The following code shows how to create a graph that represents the diagram above. The graph doesn't need to be a finite state machine where each state can only have one legitimate next state. Instead, it can be a directed graph where each state can have multiple legitimate next states. And if there are multiple legitimate next states, the `admin` agent of @AutoGen.Core.GroupChat will decide which one to go based on the conversation context. - -> [!TIP] -> @AutoGen.Core.Graph supports conditional transitions. To create a conditional transition, you can pass a lambda function to `canTransitionAsync` when creating a @AutoGen.Core.Transition. The lambda function should return a boolean value indicating if the transition can be taken. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_workflow)] - -Once the graph is created, you can pass it to the group chat. 
The group chat will then use the graph along with admin agent to orchestrate the conversation flow. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs?name=create_group_chat_with_workflow)] \ No newline at end of file diff --git a/dotnet/website/articles/function-comparison-page-between-python-AutoGen-and-autogen.net.md b/dotnet/website/articles/function-comparison-page-between-python-AutoGen-and-autogen.net.md deleted file mode 100644 index e81b96f11b..0000000000 --- a/dotnet/website/articles/function-comparison-page-between-python-AutoGen-and-autogen.net.md +++ /dev/null @@ -1,37 +0,0 @@ -### Function comparison between Python AutoGen and AutoGen\.Net - - -#### Agentic pattern - -| Feature | AutoGen | AutoGen\.Net | -| :---------------- | :------ | :---- | -| Code interpreter | run python code in local/docker/notebook executor | run csharp code in dotnet interactive executor | -| Single agent chat pattern | βœ”οΈ | βœ”οΈ | -| Two agent chat pattern | βœ”οΈ | βœ”οΈ | -| group chat (include FSM)| βœ”οΈ | βœ”οΈ (using workflow for FSM groupchat) | -| Nest chat| βœ”οΈ | βœ”οΈ (using middleware pattern)| -|Sequential chat | βœ”οΈ | ❌ (need to manually create task in code) | -| Tool | βœ”οΈ | βœ”οΈ | - - -#### LLM platform support - -ℹ️ Note - -``` Other than the platforms list below, AutoGen.Net also supports all the platforms that semantic kernel supports via AutoGen.SemanticKernel as a bridge ``` - -| Feature | AutoGen | AutoGen\.Net | -| :---------------- | :------ | :---- | -| OpenAI (include third-party) | βœ”οΈ | βœ”οΈ | -| Mistral | βœ”οΈ| βœ”οΈ| -| Ollama | βœ”οΈ| βœ”οΈ| -|Claude |βœ”οΈ |βœ”οΈ| -|Gemini (Include Vertex) | βœ”οΈ | βœ”οΈ | - -#### Popular Contrib Agent support - - -| Feature | AutoGen | AutoGen\.Net | -| :---------------- | :------ | :---- | -| Rag Agent | βœ”οΈ| ❌ | -| Web surfer | βœ”οΈ| ❌ | diff --git a/dotnet/website/articles/getting-start.md b/dotnet/website/articles/getting-start.md deleted file mode 100644 index d29dfab5db..0000000000 --- a/dotnet/website/articles/getting-start.md +++ /dev/null @@ -1,26 +0,0 @@ -### Get start with AutoGen for dotnet -[![dotnet-ci](https://github.com/ag2ai/ag2/actions/workflows/dotnet-build.yml/badge.svg)](https://github.com/ag2ai/ag2/actions/workflows/dotnet-build.yml) -[![Discord](https://img.shields.io/discord/1153072414184452236?logo=discord&style=flat)](https://discord.gg/pAbnFJrkgZ) -[![NuGet version](https://badge.fury.io/nu/AutoGen.Core.svg)](https://badge.fury.io/nu/AutoGen.Core) - -Firstly, add `AutoGen` package to your project. - -```bash -dotnet add package AutoGen -``` - -> [!NOTE] -> For more information about installing packages, please check out the [installation guide](Installation.md). - -Then you can start with the following code snippet to create a conversable agent and chat with it. - -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs?name=snippet_GetStartCodeSnippet)] -[!code-csharp[](../../sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs?name=code_snippet_1)] - -### Tutorial -Getting started with AutoGen.Net by following the [tutorial](../tutorial/Chat-with-an-agent.md) series. -### Examples -You can find more examples under the [sample project](https://github.com/ag2ai/ag2/tree/dotnet/dotnet/sample/AutoGen.BasicSamples). 
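For quick orientation, the kind of code those samples start from looks roughly like the sketch below (the model name, API key handling and configuration properties here are assumptions for illustration, not the content of the referenced snippet):

```csharp
using System;
using AutoGen;
using AutoGen.Core;

// a minimal conversable-agent sketch using the all-in-one AutoGen package
var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")
    ?? throw new InvalidOperationException("Please set the OPENAI_API_KEY environment variable");
var gpt35Config = new OpenAIConfig(openAIKey, "gpt-3.5-turbo");

var assistant = new AssistantAgent(
    name: "assistant",
    systemMessage: "You are a helpful AI assistant.",
    llmConfig: new ConversableAgentConfig
    {
        Temperature = 0,
        ConfigList = new[] { gpt35Config },
    })
    .RegisterPrintMessage(); // pretty-print every reply to the console

var reply = await assistant.SendAsync("Hello, what can you do?");
```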
- -### Report a bug or request a feature -You can report a bug or request a feature by creating a new issue in the [github issue](https://github.com/ag2ai/ag2/issues) and specifying the label "dotnet" diff --git a/dotnet/website/articles/toc.yml b/dotnet/website/articles/toc.yml deleted file mode 100644 index 2335ebf092..0000000000 --- a/dotnet/website/articles/toc.yml +++ /dev/null @@ -1,126 +0,0 @@ -- name: Getting start - items: - - name: Overview - href: ../index.md - - name: Installation - href: Installation.md - - name: agent - items: - - name: agent overview - href: Agent-overview.md - - name: assistant agent - href: Create-an-agent.md - - name: user proxy agent - href: Create-a-user-proxy-agent.md - - name: Chat with an agent using user proxy agent - href: Two-agent-chat.md - # - name: Create your own agent - # href: Create-your-own-agent.md - - name: built-in messages - href: Built-in-messages.md - - name: function call - items: - - name: Function call overview - href: Function-call-overview.md - - name: Create type-safe function call using AutoGen.SourceGenerator - href: Create-type-safe-function-call.md - - name: Use function call in an agent - href: Use-function-call.md - - name: Function call with local model - href: Function-call-with-ollama-and-litellm.md - - name: middleware - items: - - name: middleware overview - href: Middleware-overview.md - - name: built-in middleware and use case - items: - - name: print message - href: Print-message-middleware.md - # - name: function call - # href: Function-call-middleware.md - - name: group chat - items: - - name: group chat overview - href: Group-chat-overview.md - - name: round robin group chat - href: Roundrobin-chat.md - - name: dynamic group chat - href: Group-chat.md - - name: use graph to control dynamic group chat - href: Use-graph-in-group-chat.md - -- name: AutoGen.DotnetInteractive - items: - - name: Execute code snippet - href: Run-dotnet-code.md - -- name: AutoGen.OpenAI - items: - - name: Overview - href: AutoGen-OpenAI-Overview.md - - name: Examples - items: - - name: Simple chat and streaming chat - href: OpenAIChatAgent-simple-chat.md - - name: Support more AutoGen built-in messages - href: OpenAIChatAgent-support-more-messages.md - - name: Use function call in OpenAIChatAgent - href: OpenAIChatAgent-use-function-call.md - - name: Use json mode in OpenAIChatAgent - href: OpenAIChatAgent-use-json-mode.md - - name: Connect to third-party OpenAI API endpoints. 
diff --git a/dotnet/website/docfx.json b/dotnet/website/docfx.json
deleted file mode 100644
index aaa796994b..0000000000
--- a/dotnet/website/docfx.json
+++ /dev/null
@@ -1,72 +0,0 @@
-{
-  "metadata": [
-    {
-      "src": [
-        {
-          "files": ["src/**/*.csproj"],
-          "src": "../"
-        }
-      ],
-      "dest": "api",
-      "includePrivateMembers": false,
-      "disableGitFeatures": false,
-      "disableDefaultFilter": false,
-      "noRestore": false,
-      "namespaceLayout": "flattened",
-      "memberLayout": "samePage",
-      "allowCompilationErrors": false,
-      "filter": "filterConfig.yml"
-    }
-  ],
-  "build": {
-    "content": [
-      {
-        "files": [
-          "api/**.yml",
-          "api/index.md"
-        ]
-      },
-      {
-        "files": [
-          "articles/**.md",
-          "articles/**/toc.yml",
-          "tutorial/**.md",
-          "tutorial/**/toc.yml",
-          "release_note/**.md",
-          "release_note/**/toc.yml",
-          "toc.yml",
-          "*.md"
-        ]
-      }
-    ],
-    "resource": [
-      {
-        "files": [
-          "images/**"
-        ]
-      }
-    ],
-    "output": "_site",
-    "globalMetadataFiles": [],
-    "fileMetadataFiles": [],
-    "template": [
-      "default",
-      "modern",
-      "template"
-    ],
-    "globalMetadata":{
-      "_appTitle": "AutoGen for .NET",
-      "_appName": "AutoGen for .NET",
-      "_appLogoPath": "images/ag2.ico",
-      "_appFooter": "AutoGen for .NET",
-      "_appFaviconPath": "images/ag2.ico",
-      "_gitContribute": {
-        "repo": "https://github.com/ag2ai/ag2.git",
-        "branch": "dotnet"
-      }
-    },
-    "postProcessors": [],
-    "keepFileLink": false,
-    "disableGitFeatures": false
-  }
-}
\ No newline at end of file
diff --git a/dotnet/website/filterConfig.yml b/dotnet/website/filterConfig.yml
deleted file mode 100644
index 936ecbc671..0000000000
--- a/dotnet/website/filterConfig.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-apiRules:
-- exclude:
-    uidRegex: ^AutoGen.SourceGenerator
\ No newline at end of file
diff --git a/dotnet/website/images/ag.ico b/dotnet/website/images/ag.ico
deleted file mode 100644
index f1789673b09252f61aedc8932f2dfecb8cd68e8d..0000000000000000000000000000000000000000
Binary files a/dotnet/website/images/ag.ico and /dev/null differ
diff --git a/dotnet/website/images/ag.svg b/dotnet/website/images/ag.svg
deleted file mode 100644
index eba3ee9528..0000000000
--- a/dotnet/website/images/ag.svg
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/website/images/articles/ConnectTo3PartyOpenAI/output.gif b/dotnet/website/images/articles/ConnectTo3PartyOpenAI/output.gif
deleted file mode 100644
index 3c037e919dabb0a1c43e4a3b8437ac6dba33ec5d..0000000000000000000000000000000000000000
Binary files a/dotnet/website/images/articles/ConnectTo3PartyOpenAI/output.gif and /dev/null differ
zFXIveQD~8bI7N#34HBeZdDErD1@fZ#p+!*w!m~ALQ@APzX%SOjoM9rY&?>{c)yqu9 zhmuuqewQMlxCNm_$P48%U9ksyd7%&fmpw{fF7f;v)$`D_m{FzVRk7!wDp{{e(3|5& z;ov&ua;?fA(`!}}R%+qkP@>q?Nl%6*l`hF(6N2K^%gF>fu`DNqm|U$&?Ch#A9-N0s zTD^*;gM{%Kyp38p3*!%?s%2I|F-<~9O(iU70u$<Lm4JT1?oS17E<+& z;}_^ZZbfV{ErlqGDfJ)zuAs!DYjKKe8C|w#!SGf^<%yZYXP~275Y=ml910w@ABIFX zt&~519KvQ!DjD(nowcj&3lK_6j=-2hHrfuqQnV2jt$|ToSq?vbzo^!J`0lC65^QJ! z-}jaAHZY1r{s$9d>DBFhMD$U}g{vV^7y*+lin0@7H~ckC-K+l^KHc;dw1(WIl2Iy( z0+Hf@HEB)9{M`m3`i_X^P{aB0;5xCWiHIg9pbrC}%W95m=c#)eKR@{iP47NsiOjj? zr63h5zHC&yKd<=G2aRpFYYvIx4}Ko0UCr&zHHAb8vYdfH(cJd|07XiGV!AgW7a

    f+xIU#{W)m8-P<0BzOw5r5qnf9vPAwf z{lAaSsdCm1L@>Da_Q?-D;#@{(?7V~=`_G>~XMUz%7{asVSN$*iA8Bv?7tn`qPceZna9i{`{WB^j7auU~-lf5pM#4?lIf9y?#0 zmThdgt6bfR4y00vxL58VIflC z!z@LiY78>}y(czylaR70 z>}!*G^QP4r)Y7j<)@*jQO8v3?>!#3EfsW0a7Fldw;C3YGYxcUS?2X5=mg1QryQwXg zuWUV^o#FOl>1~&EN_NU{_UZ*cmQVd4_Gg>HP^@Ni?&cp$&;Q5@nOYVCksC2_ForDu z5xqHkiQDD`x2Xj;er)&Myk+Y9;v3E@Ze*{~WN-I1-RyfL)8c26X8H7f-<)@2SyeZ( zv)5-kT7Qo@|2;8w%iYQVzyGp7gdQNW0>SA6cakPeFvLtBXCd`7ZOJa{%*+Zqj&@M3Q^@iVi zXsK1`Z^m>XgZ&LKA?vBEA^#?!?>2q4#F0P7u-{g4ew!-jAJzGp_Tg<8Zg0CE^iTJ@ zZL5d3-8+UrPiE;(qcAsS9!$+VH~RC~BDdQ&_r>PiN4~i)FVDQVk^63J+x^SAZ)3I9 zc7I;m{pl^u?YH>*F6d9!jXy8(e@BjO?|VM;(ssRh$o2t?KkrNbzSpe(JG|=e@Wb5q zl-%KmGf$@e-XGrnIb_G1<(e;rns+%ne%NgvIHs92%EeGV-wkpXSGf~=?q@wT6%y>++uScAbLPe|1cfmxtz90T*HbU+}aS@xZ1 zr_z&KfO=22l)#n>2#I+pK%bRfrhh>lW)IL(5};azmg>_{q+cugI`U7NJ*=I7CfnuJ zrB&+*e&4zj^$=G|TKs_(f)`K-LW zGQGRB=-CCXj4O{nCY<1ESEut@i=wjZRi~a9I34y`tC^eFc{hJ{DxiN5yzl5d z7n5>l-hB{h?`;?*Y+`B6t6c|r2Z`hB>j%EPy&tn)=d&_C)oBOHirgQ%T&OC~cbR7EDtwNCu?W+ujJ@+bStDbm zWYbC|Dezhrf_uD@v$0TXlClHZs2(P ziPYGwgx|COrL=Q&)y-51+gGF(Ryf1utH-%Xdm3 z3+7}GMwVdXQl98IqkG%ud*lpeesDWiS);=qXtNCU{$9-|f>!ktEXXTXk(@dVR`GS8 zj=XjH%xlqLo(5}62qB{}jjR64` zB^SyL+RD}KC^ zKI>ir?Yp;s;?b-3ZTx^G?;?L};l4AR?BQd!FVR^W^mj^o-`Ch*73a=h8|bg|p7rzn zW6F;2Ki+9({r$S;m~iUZ*RO6>-(-K~CuJ5)A>O}A0%12OOc11~)=UUAD^FpOH6qsA zB3in#R2iv@iP1zUpn_6MyTDMvbFP1U;kMPO`kl_eZJzlT zc7dIK?NrPNUvr0=?-iXW$ZO?~JuNMI{+mo)Gkx6>8Y^@m^^+XF3H0BxbC(Nhm)vUT z__C$c4P7mVUX9%HZKp}sPYO*xrr!2>9BH|!C)xBThwK0MQ7Nm<+EV*x2;b^)iP_wG zE7aUjuHobIqRM&`hby-O`yZ8+rLgS!Iz!-f7b>>2)Z6;nheT>^*t?x|+|KOI-LS15 zdpCX>G+oY^yYNE!-V)wD=5NB?7U~^*yQ!`F?zFC?zXMfw<83{69_(7_;d$_({*dQM z;{7#Ao`*FrqfhWIjowdQviI=ph9Td3XuHSwEEsLyw#q+AosD}LBjo^S-%f<7xE^L(e|zKH}Zc}v}Cf@_~t5q z->~^b{*$#|Nv8rmmpqbX=gnFlh~(2-Z;ko_u=VzsdqzSuEV0( ztpT00)}39w=~VR7m(MP?Up$+k(~sLJxc6Vb_xY`FoR>0YNCUOfowIqGmcJhgD{i!H%V>a`6<*6!4fDEym!@YWG$uX3mPFL6^B z6thmQxaP1VHyd;L?yTKwFW-AqdfM>nOM~H@M=zGHb2VzaZr~Pp{=YY`P8%J({(jX; zldyN6{F@K1Mvdee)JD0?UUV)DdwNmw61B5!`ElpmGYD4u$qB3Pid8)yqW2VC2d}>T z!#Mg;_C{+Fv5=-PJUTibNv@oG$yOSytl7zb-G}ka+=b4nHeeJ-AGtbqYi?Vu@Zd|q zh5no!sO;K>2k3*Y->P;8?sFn*mRBc(pp-jU*N=s?vq|I1>Gt4%moKO>T~zCllKJkS zyFGf#-5b}V_BBneIh6S`ZvNZM194A}yP!X=?0gWl|4LTR;g6q&{6i|X$l+vx2nheAUY& ze9)G24}UoBd39Hv_PbKG;P_bV$vvN~*ZOZUTEjUzymaXSi`bsdqb{9m12*nH9e;iN zbJne&9%~YJ&EQ`DEYzt^l4Z`ujvO$w4agnSKiQohpqlwa%lI9ikUWKRALHK1yxvta z1#rSd|J4`oJlIh*YcwKcZT8Zo->QNR96I(V-J_4!`gcRZaLTH!kLE_bT)uIZYu=h) zPv-V#ne@DA{;9LJdy&)ftE82#>We4dyn8$S^JxANUxJj@b>T;f&#ccra_!iWzZA>Y z2@}D-fwAeX!Y2#$KBc`2=rZx#eC2n{=fuLhtDe5CI;C++s5$#AASl24U5jFc>-vMC zeZ3dxw>Y*x*G)tW5Fj4DK_nrC>*~g5Sd*_Xe=m+`Bp*`ep4m1Ygx(R1_uw&6im=#+go zrc#YQnvWUJ5tZ_`h;a~LssOc<;f509W)zEO#9AEK z;v5#6lxJypi-KTLJC_Fh3tO;c~HX=$$kf8Yl^e-fCxbb*SZ-e zvmunbVT{u=1@;eoa$2klVEYyX?a9^oB&NXDl-^LU?135xM>)7=>sR$f3MPfu{oviNH%~3j7m9ZfN z^vfNouXQ+D=g?omYZdFaau?JBrytfr2$>H{-B@DBzksTb(y9WcIg*3o`Zi@7GRO>UBgw+qkA>5rw@E$QT8^Qh5?qiAEIV#aw;r zAbVZjtQaot1MH}!B&REIT@s@fvN!HHuiAj#iJ}Jrr|s5`y8AU`j>4_}PPB9o6OJ+i z+#g29XlAW)J2drh1s1= zKK^;6{J-!d6EWJS3T-M@;4v@_z#f$lXakH4G*-XelLbR#a;+H{q{E<9teq-Ghr?qC zMFR0x$G3as)KSigaSaNbz7o}Ggl7=RJH$Mpb2&F}GtOCE>^bL1z}0TifURHF@8@3n+tv~Yxk44VP`6$9`X zKo!G)B}P+G5EI7WfW{{j0y*!s;Cn$#2Wk}&eB#+n+8u}hp1Vf#{>sWC6sX?OAz0h1 zveOAkrEP0R09e>)I2ZuuddTyEVhDqHM*(t(p|efltx(psOI?v5!6$&lfUK46PTrS^Yo&HwFG!BSY~teD7An8v9v>;`mo2{BsiTqN@w zgfL!KpcU3hfABR!PyrN93Yjk(y{i^JU?|xQojQWJBw?5qt|&Xb+ec ztjc##iZDGsx7z!fwhzt74l++sIH^#&f?S3+hhY~h&}?gS5`q^x%B)BZtTL4^Q?+x0 zK(>tMU#+qMVy|@GsY42b*JzjGQ{(i~_(Qzpp%24iAXz8#r}F?)A^R(?ihwPl&)Qx} zzf_rjD({5HXwXc}bsu|&F>cpi%iu8yCqw2P-`{FnN-|eq6!!NZ1!G1Un1jN5HQ-1O 
zgVHeq09xYm(8$rHGm4m%2B;nVhARRDQsN9h|N4iZ3!rF{4cA zj{=P(fRX4ZAm6h*EdlHc+M!-xgftU*A#ZX%9q?T9l$_TppWgLW8E7w&(o&(CSFMDc zzq-|nna=3DbX5Nz+1wc@zY9$V?LMX|1_`!c%6t?MDCTj_@(6K&DdKbDcyzT&RE8pB z_&zBRsFZrDrJlW9mZ{8hq`D6`PiBzB->?5PSdz2|zd{#Wk zlOuB%a@)grG0rmN$qVH0s7$^O-NlQ;_YQZ{xc7=#y>1R7zN@0xfr)3F(@^#hHTU- zf02qAzS=L#+Vc|xdI}6kd>?x+XMHqsL!k-E(0Z{!rRt~ddde|KyIyP`iuRr1Ql0re zBpxP9>?oFb!xHCE)utA}OyPS}qYU(A?w@#Uy2N!->Qg@#pQWO=z?e9m50!6}QOqlb zo$4n)w@dL}vPT%1Pr4LCKt)$|*k+_s?743BW6VaB7a#4Wf1ZPp8R(z)P3PM!zu;7? zn1A4zpzF{(IbTBuH-kc9%Pc7-b-*jMi&4ZU*tdJNfS^vuq!%{tQZZ9hP8fxMkJS1# z#F|7~S4(`hB4_kmPL_BtS-;XBZq505V5W-kUEu_$;MWrOcMY^C76%Y_z0P=@NH` z70t(!#CB)-=B@lg#Sn7(iS~k=Sv(|N>)*(=uH*v&q^eQ|HKJJ~?QX?9s<S}+Ylza!ayin zv#=fcAN{s9BVTwPQ)VAG?$|5UcsfI~*h+7KZ-9O=t(xbVE(w%N*m-D>F7b1owp62y zGi0`OUf`J0KfRrhfwr$gk=nTa9eirtQQbTRBVC0MeSIe1`>>#e1hfT7W~MLY0N?{} z^BCkYUW-}sUja;>Pp`x*6w>0Ot$NW0sWMkqyMG(3Lk%Etl*ad*fSk)}rWGAY+hh> zJ3EI5ykr)zMlWw8I~7F`NI1pHIT%%7zLHHm8UP~10+V72~N+gSb03k z9XvNjl?{Bx0xKuQeIhtv(6W?r=`$*09r?0RX+5fuMFL`E>90>=x$R>MO#pnRa>J79 z%8th4VLriCcHoxd+$X0ZNJkQSI0pgsE?w+)d-aut^{EVTS|y-1_iTtc1MYfp*Ps=z zjaT1aLTlde?*gN&1t9tm4lJBI|IU|fXXd#mT%ufNxi6hAdUiSbw&oWYdxiz2T3L!e z=*W;*?*&fVKU*0&(wbt<7Q7)E$@lnza{2N4IbV9*`KZdujcBnUXZZlM?Tm-L2?*?s z+HW~5G;T<-KkYD2|7)~+Xz5Xu=!b(M2m}H3p%qqi+lV{+Bhf-Tql=M_!1O8(kgryP zco3Sfi{!Or>~8OBn>7a_!X9BkY&r_I1wPE%^tik`R^)_+aSRp&M&iWYGyXEuldC1i z&#eZ+A$<@&hL{t}9%00yh{ZVLm|a);58i5@%i=7^{wd?ky}$+HB}pilwhn+3`r$9# zZ&rnZGF(|@*{DEk-B^Rz)}WPnvOHMP2f3{RLQRs{lAgYl($!FEfgZ?09X}qs0VS^z z)-wi6w=fwijblm>TADx{YNu@kb-M1#7%Sr&`xP|JF$*8_z!?YH0_H)V%$noJ=b<|5 z%{=@qOO2gQ_~q|g^u$2arlgG?dONJ7!N!Yrr6ogV zi1Sh$SwIvKtvZ_^@rIZ&!Tw6B(iY6$p4M@2KM)_UURcbIqAUCh^iF-*lvO~297<(W2O zzu+sY$^t&TP%+ZtLx=TN6yMPp9ZiD7DMz^9Mp?VJg^wna5*EI|iUV*j@{ZYOT{(Be zplh_AzA3(Ue({R?N}KXoEg8dmxor*SY5gC>t14}rz3}B+h-rIxR=)$9o*c$4M039o zP8>o1O_NsJ8gE&(XHyKV{s{NWTpRI{(+&ddLyVeXQc}^}!K3VDcUG3&>P}42*|Mtv zm7m#5Pd4Wsy)tC6h1EB-4@8X*QQZO~k=je+#dsRGEJJZP4s!=8I^>EHQD<#ij3@u| zp>^oMyrXyC?_c$-)2=*mSuo~k$OB=$SuiF|`d=K!+irjO+bd|3g&8|<=X<<8B`Dj` zMZ+G%fBlKHHeDA@Q;j^vHET?wK25C90@GOcTcVOh#nyVKa&GJYQWP%jl3_MhVFM?{ zzROuW%JR@TCKV{{$c$w2_Z$H`qi=U|S-;-r2ATi53hgy?h$o3>W&bAPxK?*NR~C|?#9}TjALUmlBwG>y6mHx_Uvd;={^{{ zWi(V?l-yQ`NwW_Dx{t`fOJ%UOD1?P8$A;6Asgd$v*KaL_z^mUtkE8I8dZe`m11I!P zw=;VqiHygV<}!JRGo;wHRzHbUlo4VzSc8x1K$BC(JA84LMTqZ0cX3&vAv8veL>jza z?9B@h)M&`Q)iOhksKacqZ`Te2WTq^FtUg2%L!)bGhg;EsU*MAYRkdcD#|oU+_3iPL zqYbNsc#2q25;<+D{gHl?c}sy}O+)17)p(W`7vq_ZX51qv92AL!Ap125K^!r&_fdsK z+kpNN=Ix-$#68Ys>caBrV22i%B7}LQ!i*5>OT*B%3-=Pn`cahCLRoYmpT45uo+I^+ z^8(l!$TDuZT^P$tgp&=I6JX1#zGBaG$hfN&J=UNsp5Lm*>nGpVjY=%CLrkov#0BpA z-3ub@>-0J~2%@T7do@a7etrh!oL8we%PX1wcm$1ICePoL-bZ^5<5)gIY=VY^rhJ}T zV4p|ZnGCD+3;@@7x^icwV;v(_7eWZoMGIgwW5u*WOJKPtfvGlnKnR1*skK*jSc3Bi z{{Z3BHr;PGYDE>;7d*;a^t#rdrCVV@I#~qD`i*_ZA(uEM+4HR2phq;v=Di|+G2jva zdrpiG*NS7tGd|ChY9FO)r`t;@Cxr=?V2qeH*iY1JP`OuTBDJ4@*5APL&dbE(t09Jb zdX5bzk{Fg%OCFWyvlSx9xS`fq$3dIjB8PPKh2&G+adtE8!WH%*6r=~#YaPOw;3OMm zPx3wFmJR}T9b-keENi}m&;$5L3=rdWRHCgb{;1JYaWMAhEQ@EQ$_?*_2B%4Qd&0c> zj2Rq6cmNfJm9G2(V_`4THX2^_A=SYa1Leh%jL|l_RcxN1UkLb0s8mm=+&Q=yj zvE-yX@GZ}lcRt%6@r?m;QUY7*VQm?t5ZP)q3|k^TSQbiz5*ib)o#G2U~C; zr3@l5KVWRWeJ4Tw3epWR&g}Hq{vdN{--o%Mxp3 z&5w&m>;BtTU~8*}B4HVQZ7=NDfYQd-ODU1^IR=PVF%lyuUQR9WvT8ytF8*P(2F7_U z885{fp^bZMIy@`Kv4X$>#&S)fz;kw=cK9|c-3rc4qt-{-DWoK{Nl}M)Gz!dSsTe!N zprj8#I)Z-2%^YNZ-?G~qR_pf^-3(gU=(_Iczt-LeBe&AMaF+P6F1Z)BYRLS6rb`(Q zr}==5+9OI;Vog>&zxvkT{H8mi+17L^(#o2*NeQ&}(s}5p3aIKN3(b}LfhfgBP|PII zfubgbn#U3;Xvm}kCu%%{>N@!$F4iBA;G&hVipZY!d+y0TMw;OhbmKZqh89lf*LOex zc*RqfvI^XcHDSf@uP#Z+6mHCtIx$xnv^`=Xb^1JtoAbLz#(Nt8o|WNB@Qy|kjhgl 
zF4%1I(VsGb)YeIv1Y-p?`}B9Ic(Q;zn)>C#Id)?Sov!#IT3o=ovpZtp8NuHiZ*mMCV4&k=@P!Y@QI+!R2TDs*#xq_I7WQ-pX zx59*K1+hYblZuJOVxm-z26DWi8dJg~Y-Q5$nN*F^pj3fJ0Fp^ejFwXWtQ`H!#xNHq zmcT@3+GjcwBOW&xcf~lnBcDSkh6$BSx&e(=s?a?-Zm~{Hb%UYp<;L2X)O~7*j?@S; ziR4Gf)d}e$>Ru*6f81gTvN6h}Vjmf-7n|p6#OBIKYI=>HoQWMA#}R0T=gSQ@-!wQg zZh+3DjyD-x8#kbg8xf-dlGIH62FrY4oh%|b_c8J_EfdErDdm6%6VF8&(rC2#8>p0V zOVnfIM0e(_Z^rLwpUE4HUEB@VR2Yv&8g8jDcK=53XHuyW*7SvEF*CB7&azY?igxrzEB*Z+%Zh}q~pZ(%~}LcCB-8U$D`0LUU@<($Pm`QAH` zv7;FO3`@Aa5Z}JH?}Ui*a*kEc9JK3cuh^2qy8uD45&e2EURQ*!%*R_nAezZJu+i_; zLcc$r@G5!TUv4r`V>zdB&yLsmpYHSDbvWh;m+>Vib{*^%%`>Wmcf*UdqWxxNd2bTW zx;|lH>IFMQ`|S|V!bW?WoSS13;pY$?yud4Xo@a2He(>^WctMZACi%XHK0mT&PVR)a zd(RxtO76e}-&@lY%6PzCm~`K8(xUZ_VDqFPGCH&=+B86UO#xxWYE7^q5t*F zyU`N<;-&!Umu`D9ca562$}8fwS9n@9K7L_<^XUjxbm-3(UaV_fe?GiLjr@k$2V6M; zS_CFjH?NQGXbH%P^te8vqgWSV&?3ymqhWfpVd9vW7|bPhi5VCA9Q{66q>6DdB{(ZI zR*nUsN_vZoVr~g$!vvw)!clzKQ66AyHTyXhf|+ltt#mrMTIMiewx!K3z_>07GijN1 za$f>0P7njaN*JMD5|b>AE-5hj`VkMK!}m<$%fy6V6QTF^CA?Y`{<|gqYu17{`<8vw z>`S=cv+T{j<=^&2{Cv7Rb}u1*Vmba&eBZ@oI{QVRy%RngBu;EfBwq4D@2kraL6dSl zx&+5jQv#LL>5X-}0BA@=z`QzYv6(%}BpyZP4X5b!nSn%eElYBzg!rt6Bo-Klqrff^ zJ{v4zv=DWbnA2+NY!P)fLV*&Is!^sa2^omAIb3r89tv4vl4q5Wroc~%PCrwVx)gd= z5?qx+egg)+%V2!(-X*8J12;2UcX8<}`&@^z@o)F9|NU&e#xUdeGhg(9 z4DII`#LEdc}GEJUmS{QAxex6|zyTR%C2Di-{++#P6&&~9`oavy6-N2n%|El?Y z?@hheD8pqn@MDx=xFvQ2$Szb{oK7@ejnZ~hx_R{(%M}b;CF2{cb0|@_R-*0GM+b?sl27P8CiMb#nK`Js%S28k` zdavcigA!sRlQCFB50zkFav2@mO@$)E0YmX~8@xzCyepx8Q=2NlsOW&igsx*kms-b3 zv2jwT9H|RkX2y{*rX{|;l7L;)Dv zW)^O2EtqPgXv(M|HD-cDl62722c@-yNkwbiF@I6Clp2$2g=wv&3DT`7S29`&!dykYYo<`b3nv3I=N3p@#SjlmU>N_@e+} zA@nJmezb-L((f*Qwa~;iF`@IDnI_T1)WGB|0!mb4D&i_b z7FTMQS1w@eTj;lM{*}r_2P<8k>`P|sTfBJxnl1a+G7h9M4umW|u!gaJ^UunE7*&N= zD)SFkm2NSVy{IZbc<}Jg9WUN{p=#=~)ZhWra73y77e(G)Vne1OS{#~@?YOr2RgLZbqZvP0$%Le2|&Rufq>&3a{S5MVl z4MttedV2F}nZ%gGG&oelD3M@(JxMs!XVQgAS(#|sQLA&S&*B-EIs(k#{V9Q2I!eMi~>~HNQ>~ zeK-eSCghrgPb|8ezqR%F%hugjTaWu+*>UycyYH>X0z#+Q!gtvTWG?l!jSf{w302fi zKg0USDI@Y{BMR+HeDrArwF5@mDzr&8S`{n2pNk+&pD7YoY@b+rao@!!V!{ZF{$)a# zQBZz}DPj@&9H6}vqeI0M99PSfsq+O!pdZ++F@kGMofCj81Y|94>IHyV4XD#<{7W(6 z1XCxfM6mQ9%G|6ZGe90R#@I;=HqK-oH_4ncm67{;BPzCYx5o6& zUZgs7rZd3k_Reh^QBUvei@$R?aKlt%){FO;=W|9$K#F4Ham?|@!l4b!i85Eh12-(2 zn*BUqOD-Qujl)$ie-hlrQ`BF`HVCfy>swbSe;p9P1hyz{`P6%IHzZO(k29uYgt!PH zjzGcrxUI9!O(;-nkAJZsWl)Z}jd!!r!A$&JHlcbmCGqlT?`zqDYxh@hbRuUF8WfKc zt`$5HiUX?h)?dqyY<>Eu`{|Br(zM=Z`G1~nKJqNNH-9aXQ#c~qG^=;ZAL+g~&&zXP z?EWK_y?L>z^?u=>-o?4EcK+!-X7=jhwWm$jy#C2a*a~nwCS#R5VXTSTq;cQr{+K%c z$PCp)ee{6(-ppdD*lN8T56yPgB%-1egslpE9#efj3QA;H61(UyK|(IP%K#L)~`Dso4huW{>=9{$cOI5l!sK zDf5xzZ$Ivt{pr7>`wGq9oqYT8{@W_q^^wBA2LqOU?pyZd*xS#2+dntG{W3A@>*1sO zo40>D|9ABA+pnl&A5ShCYkvC`z5L_vzhn2Ve?0m3z@6D&t}GjQ)r{V1>Agv#z@8B4 zJQFDs_AXcNU*5A~akswDZtz%3|)h6p)a>)T%%&PI@xw{$ctcR*!@I{cb-Y6s8O* zs+a6tzo_N%i}wrGwuEb%Pk6fiKKXsY^J6zZ-nck=W6tO0nyH|5+J?-(_p8Rw%au|z z#Wv8sOs>o)m<6YVHr3wTP4`?~PiX49Rm#*OoBGk}=N4Oq_=iA-d%q`4k;qckAIpAHKw2eFc4#aT%syPCKdk+XG@&=gSMZ=<%<2ZhGbJKp!VRoyP6P{UhYi+&Aqb(h)C zpH6JMGBMm;H_y%Q+STtLdz;r(HoVCR+2xWu-WU0TZU>%6jKB0g^HQJFee^T2JN1Q6 z1d8o_@0mTInfbRe^V!c}U6^$nUU*OI`q}YPR~b{SBF_#K@3C~FH)G5n1#T>`Y{Ycq z-Oh~*1z>+~bA)$3Mxn(yQ`N}l26V$Jm$%f7SPf9cLt^4O!A2bmU|&n}Ifemuyl)ow z^^$ok^_^KC;`MIZd|)--LvIl74QCLm#^-t87!#_3f}({Xj`gDUFyp!$>oAZ);&*wi ziWcv7f7c3$9o218*lAxIL|iGDQK2c_nPsNJH**|E>nmecj~=gDdD;xWXWRHgDxk{3 zsP8!*MYD6Lb)i0I%c33x{m@t83?e>RYa0s{Wqh&+LN+(4Rj33GpQ}(5dr(&5a3%wtAHZ&~A_4xVs~_3dDro}iPi>FLEcJFR;MCjNr-ig@?7ahY zQe;~8PwzI9OoSPh3s~)}C+jp^uIa|z42PTxWX2b&htzC>3MpK_4{~n>d2~|qu$w}g z!`YAJG=a-IATOAm%Tk3|jSAh#j$0HHxPYA_?f1N|HEv+v=8`PYbIkGXlUzQf9HQ^@ 
zqUN0thPZt1iH=>nsh*)2E7b*)&;p-kq#nTsu;m>ASJjX2=@gLo^I1rT!hB03LZ6Ik zG&W9h9Z$CmV}p#noxK(I6Ax*#U%N=W`$v)qq;Klld#-@>WT$%(RWzj&|7yeC#4|Gx z%--(U=)bNs6O@lLN6ktY#^=w~6W*tYE| z?gU!?^Uw503!IHEwf!kBQ~?=}i3!yYX0&t=EVSPOyOx|Gizy!0q{V`4@R)c@C+dYi*v`X+?EV|;N&wqVaTk_ z#`S#XE#_IF7NS(ls)``%5zAfniayHG-a@0wZ>9U|`9^ouIP75K?oADrVkmJFUnmKbd8))xy}+EMe}J1Ky@D;y)L#6_Z9zKrFCIonBEyNzQjnvYRfH~%!aG*AJD-araLbSke5=Z`K9ZD1u+r^? zZ&~k(qMDfk!%B6Y13ad^l5K6YvI>hgFRzcm)ESf$fX42eyIs(yByvY*dyLkFyCGfB zKK$Ypj{cT&3J?>}KdF!n^Xa_{hrXXpo$ti&nxx<};Ku|f1&a6rp zL#u!Ym$j*Qm9H?5XnMUi#TJV!w>HfS-4SL+=x{W@r}i2oF+zVNTDyLHk%ooaNYs zs@vg19wDo5+a{jh@Ai0w=jK)S*OfF^4~Jhw2nUuwF<3320nPvrK{#ZJ2n-F-5KTiH z<**q}&oREUu|`!$wGONP-^=6c&ZfHCrA+=V)45TJCo1e0o{qnB;rP7+>|_Mn?O9iP zh_ms->pRUSA0C4@pWdR^((tG*bgyH=R{Au#IW6mIZ5p^; zW~Lq?^SuOdE0fRdHA86zuSEN)t1pG4GYgKyYGL#tT2S+m^S~v?6sDd^p9`v z|9AQ8JB3W!BBJ_o>EWAuos9Nu`=6_$owsEfa<1^WdCh`A(SK84)vDbZiv`!)z6}gL zIkWuX^*>J;L8c==pWjuMGNxCxzI%AAfBecHti4)r`{pI0r>5fvWh>gJmnp*#Gvhas zV_Ge?LuphZOOKFOlh6E7C@f;(MpQ_RD4}B0(#nyf9jR^hb=gg0?gM+?y3{`SBz1IG@FjHQr!a`-d9G)q`lu4xAN5c{qfnO?+*<9iyo>v{0AO7 zSp8ixbSVAOofE{QR)cI+X? z;?xD9tHt;Bk>iq|kQm;S^uZza1fl+Z@Pgc3X;fS$Y7zRxvM?3QY|%KbS@i4vYNv=B z`jg@?w=uB=#rw6Ra}*W0%e1`5*q<)wFXneQbOoEx<&R?mgnHUX#wi?=$Y}~wpMViG zKHKlqttD%S4)Ynh^dHq#H}XKawx+ZH%Qccj`t|zEkB?t({Bv#V{P2rV`k27ZH2#Yp zPBQ+;Mk^$|{4})5a{MNJ)l4}~lYY197PHX#TPN#4`nNmwC(nMn>wNjsw|nf{&J$f; z&(kOFw?!sl9|Q&rray%72}{~j5}mx78Kwy?9_0HIG*(#MsfhovI(~n;|Bm?kv-r)Q zzdsihy8P%(Ik5J}i`0|<{&<=GP_XtD_YBzFY2USW>P=LQ%(uv;LBqQ9R*b*?xM-1O zCc^izGq1qx^8GSBP!@9T_^yiBbwA%%twI1k)tkTk9F`Zl{u-%2ulm%xl?8SYpf6^)lxNup@Pz)#-bZsnqY&< zbnrDy;0dkXi&ABm868O>0;{y^P9`=*~)1#8%PGLQcDVOIcVzigee9eS=$8 zDWwVY_b5pV!0*c2dDEx%a=zTrExwvG%f7;Cq$dgEmk7_R;!vG>_SlVnE7H}2Zq*Uc z?r$1d^Clb5R5I)7&m%xlj*e2&mf)~@njX13MayH0^@{6qV-M0`KBL@B&_8C7 zUZJ*FAnY_Qsm*s|_glPiXjzrC^h~6jS2?HL{YS?k zCqFx`D1@46Jecf|XdRB#J+mvCX>HJ5>f58xHLq`CdXJpqMjIPWN7nOi&K(YFTv(Qo zmbW9%;c-wcs-iB2g~Fui&RIyX!{I&Ax9S?vIhxd4 z8@3V+vq|-YyIasp7A@3T!`IIG#+n$#8_3xZ2T$`aj`89dwL`g2X(qa0o%1=$6EfPq187hCuUx`r z`iCVE*EII$k?Iu4Y)pJSi72J5%h+o@7RwaQ=;)obwUog2IGbzt0$m<=9`}lKgo}3R z7($(O&Xvx@!;Rr33x$VZIco=#kme4RL^J>{)U-~p&X2b0QpzRm7b>@T7xvti!&;Cf zF50x0O23z9%=o!<(f0A6{J8Iyo@bZmTXw_;=*u;VuzxG>s86T}CHg@I)MXfqD@pot2P(P>c9Q;P|?HfNpHpQFk!YqP?ZA>mg|g zcTjvfz6_(dxLoM8vyLr0mNK764tfm!nUltE+XxOIh{8z7LUK9F|<(ilLioXf3=lV!59i>n=1tEx1L}Ex(;NWJ$zxS*~Tl zPdWw;bibPud{B?s+;Mo@;tb)p;<~UL>`jVgOXx_MF;f*~WEgod)V%4?Vk5z@!Kr`Y z*4wmsjZ4nPHsu6Yn3fAb@XbZs*68Yw70dMm7aY^RMONFE2WV0{JWt_EIaY6^bl&}w zkEFFYtL%H6K$fW;p?PuAWPS>Fvt_wSXu}m;>zR%wh>6C7d?rBhn{ zjd|fw)89Koof}_LT}@IhCLHwAY@X#_=3s)EL|cY%d&Ud>>ywR1s=DKSKTG4EG2>7T znBL!AxiV+jAz~zEYsbP`aa!vMs{RzUKaZ@u8J8O)2wO!n+dvSB78Q!H`i*E(=5jB& z1Z|12wxLBbsX1)i5;Zh+5$yz1ojBB382TGbB{QkZgtV34Qr_*`s!?ynCIVXD0!(6B z0gqZ>^$(T{Fd{IULp5Yl&t8yen5dUAR7)Ys9S9?f^7U(E+D&=HzWmwTf4o?crangO z49x6qG*`hy#ZVDvzYi6&wwGpTK%0t=dh!HaYOB|Fa(4gZT59GFOyU|HCE8xTStCT# zeo&?&7hcdT#4ajZhKpRYwWWyFmqb=B+l?;@%9<_uPO)dNxoJf;X++(v0Ll<*#`m3D zZ|!Q1vC|U%p*)v_f@ruHQ{is3BpY46EBVqwk!=yW&)&R~Xc0`E z&flcP-Nv5uY@^OaqY!I*@R3G`2h=TO?qXP+;Y5hkAg+ZLbN~~x!j?)Y3h|ap0)46Y zPU>>?ZX#D7Jzfk+%yzI6Y07I=c1AE&YbPF?=%80#VbOy_Pf+G+V5&8P zHkU^&nnQhHM>T%$l`}#1AWQe8}jc4j51oFJ!ybId@E9COWMCy7YQwxRY4si071 z8M&qD86jYW?fJ%H4~IQNA=Kk()I)e$b(z>OFN4b5tlhM*w;aHV*>8nqP)&wm=3%WS zbm9+ae=F5EBPidG_SK$hFa|zPQ(SU5gC~}20FkB$r5MKO-9zgJXN38PFu`SeH6qME zt$Y;f|CS0k;0`>Y2_*FJzomjwhHuuN%=`b43K#0{?EQaAh2|6Y5C7j%L8fIE*3uAp zV0MXP!rhirPfq@Csccx-G8eppWdg&r!SYbs zQ-c-ZKBI$sg~9(XsgRaLSXSULRF$$nZRlX?iBtbWDvS;t&bZ|;T%GkSZTLve(5d00 z+kT7=9~0voN91{it4C^zY)_BW7W;e{sVfWqKL%-|C#thQjGmMiIDTlT-@p39DX%@! 
za-CUYV(4_^tue|-lsn=xZa)YwX-tc6rV zDoL4*8C%9uA!-OwDn!vbGPY776;e@@q(U2#`T2ae`|~~b_x!&1eV_Yxet*uHbLPCy zHP`cfU9Z>s^?W>Uj-{J?ZJ9W|_iO7^_2sX(W^M#4BduFZzTNrpV(+)R3m-23Td4p- zn$C)0s&TVzs5N!74`kQP%(esWrgIY9wzvi#mmPI;otj4+e69RLK$ZyY|4}OF9P)TX z-Zit+w;i5IO1qb5P#hVi!HVKkhe9w)dY!00MzQfe?d#tCvfO^`bT|>VMSB>Ob`U zt)2W&^>X8c#|FdyR4@NmBKX&O`F|3@si6ydv24d|m?h-b=NJDXg3r%7hj?pOTo+Vz zGbjz1Q=_QMeTZRB$S`KLUS_xF>#N3z|}AnvO_l zHCps;XH^_MozjTIJ)r*jv}RQVx~^O-+W&_KwZlQb+c`qJkQz9!xj61$dW!&I4-QvBxnb(dz(C68x8R%5{sMK(M&<{v zb%LO8O$)cwIC>q*{*P}+DSnwQJvCg0WrhD5p|Un;4}$=AJm_+DackV1%aC^E40Vh| z-{s=3smp-Pju3@}zTU=B=X;G3lWUN+l)2ueauLH?3cK*dZFE_WO0Dx5_)e9RfqAX# zK4frxlf}ndHjf$a*QHSdYQ$~MhW>L~ly)u`fU>M*KlfD=iWq&$nBvf1&VHuEz9`{i zGBncXWzH1Dag+d)om!C|bfJcL(x+X}8*nYJ1xm)Tm^&lCn<(oTDLzA`w1-e7K{DxA)tBvIWgi1lK4an4Hbvi9fS~ zSL;xr=ajBF4B91a>)Z9%!s2gl$lm}4u+#h6@zo^ArZ(q127&QgPoW`S{?)~6T8CAh zKWUH1Q|KsK`fGbSF2Cw|@GuBM10T~M~& ztNp+*CYPeQI^v8kM6xB}L}*qUoX{`aoc)0-C5_N-uz=05xy*A^4t1nHCzTa0kBEUh zm#{EEz#1*cA_O(`vN_e}0payhSNcjh*(>lO!F}1i>qI>|t?mNhd3*=;j-|P&dV(

    8ln}1nq|o zeu|GE*e+D_zEzObG{O}Gd1U$Ww!(<90R3!58uuez}?CHt1lSp$wfZ0v3 z=ZZr286Ks|V#7JA_r&>#;`8o%GL>58BKOY)fAa5he|8Szx<2|xE#uUnP1VM$lkN{yeK-*r9{^9dKHD9AVO$y9zdv5(JxkS(-Wyn5!_@a7> zg8cS}y4z_U&mLqUDJ`47Dwg<_{i?j`*YPbm+chx!h^sE+e%i$nq`gGG&Hz*KA|ATEI1XIMCQPD}`Js%ODRB7|b)2LuH{ z*2AO)?B+o*1oBDZobn^rT<(ibs=(mDsmwNdH>6UY@HZ&To7&<{M2q8Y3;lqdB%WA(h3_Dvn42!5 zWHXofnTp|==s$&rDScvRNj#F5vOu)yzCtO?Fy+y$K(6&M>NqP4ZuSGU2PBV!^1eeH zP{js+I~2MpS{R3dosm(CYlutnuAL5zg`?Aev9O1!reRVDX^|g=;efcy3p%bJIt?{H z52b>rNb@l(8(EI$O*4UpFVL;sND!*&@KD7*Wb?J(bE%r4Db{@>&XV7`;TwKE8C&GL z!ov|(_ph6{CQaDE=8;OOks#x}ZUr=c)&7=g!iR+J_m`~O)V1|B#|jNy-aP}Xa8tVN zBtgBm+!hjV{N8%j4yqN9M_i7$^7$47pH!o*8KyyeyTYf$!}s$e*NvBQ_ z{h%v3BFi85eEa(aPx>C|>L!@=I4ey1%(7IFeq?ungKy{QO&tIFzD?GDcHs8O#qo5T z>nD#jY-C54Lz<3zJKP~^uv>^_K37q?TUp<$b75l}n07AHb;56dp;}VlY_Y?WiBytq z4R)3-^I`f&;~xEFuP*53er3HCY<1r& zJYfMFQVZ&?lWDYMlbQ%`=?BKv=qQ-Wz~FB~@C{fFh?wH1S3T5e0vp)#@a#Ea_dNHc zLAgxSZo4@&5kmUvk&wm`D@jT1pN>d@?SC&gF3E-*^-R3OBZTWQKi-|c_Hw7L!Io`Y znXkMF#H9~BYEqgAV1G92< z{$LY4;EH#rIplcahk?EDav#K;#&7qdu18py8pUFUJcex}t&h{{hc>@6skzR=zO;ak z%Ol(5Z3##WI0B^rn*NCm@dvDDp|@$N_?BZ&@&U0S@csw($ZsrRHmT+}0IE+i6CCXN zg_ZFIb_-z&v+E`aY)IH~$05%6X@#k3z`KQq5o}P@bdUs45?k6<#Ijpp$93G+d#Wu^`h0qMt_ zYSbg8eKPQVYnOCiMdG0g(HR_Fn2r!Q6ABb{oREr;RcC07M>@Vz=8X)C2a6kea!ROHp)=w7!S&6_3X?4WS zD%frj+rrHI*$-9n=bAo)l7JlWci;+4A<9Z?;hDcmo}>KK$*~KjdC&rOg|49(`JO4? zF>zwzjJ+}li3}r4g-7p>C9A18?_UF;8NRFhh~_|Gu`eU23n(sBP}>gt4FGV2pl8vi zE>;As))446l(OiE&K|hMER6gD78{!6v`68?GlkQ8fYR;^DLGR~1O*s~)d&knm^`{X z=GJd46j>ywSjTTy&{;gA4=-FGXXCn_0zw?9d96kQ9LH-s{ zDpLe8iYYDps0tw-8M;u~r&i{ooGCM&W_B@$Q%N~w8DJ3AS=Ftk{>&=V+9$A}cRIxCZEuGzoiHFaB zN|N=l*XWbq0|I@xj){MvGe_dV;|~VC0J>fRjeho*DDD=xMA-~L=~9G(PlRSP;Qt#d z?5G^c%Yyj0B_}I<<#S8Yf#bK1Bic}q{!~O5LT~pTsg#L1c{-zNq3Vcj>V@A}d%`+r za=E+@5}v2_cU|!Y(8a!RZf*mUl6ldCgN~<$hN$Q(!PxW~xvRBjP~|>(O(pR1VG)T9+yn8Ml`DRUFPv9)Ujsw|u~CEa*6KB%l`|&mQR`}ffL~Wa6=0jm zbTS&4)P|1h5=-;BI*J(OINY`<$84Wjn@CNH=p{aB0OY)j_tmh4w8 zC;zkvHCnUnTkn@}<9@Mg^XTDv3>lR!gBOOQ(%K6N?2>Z*9 zRNg|g-EF+U5E&gYXWWB_p}y^faBZD;909ySQCnCLjnM5g1qe_!lNoSMN-)>b( zw;qEi-n>8a9dgt)cc9*_Lu1=$kJISo%opr3ANM;6^o_VXJ7NfkSaOqecI&tAPRQR_ zw4Ia6^Mks2Km1t~iEP_x@VwMRHprDz*07LeOb9X( z>HvZmF|IlS&~;LAU1A=@Oe}yMg={Tye5nMo7=uCdMNe-w%ZIs3|B*@v2-MDg65d2M zG(Z!;zjf0>;tfNq0o#n#onK$J&HG3m&^X&5PxTpW2K@g3K+W-y`g{a5A@g$zhM{CRI=$~bx;60&uj%^LCr z6w$;Y3219ddTJWCZCSy$m3M6tVa(hkB^OunIhT}mxx0a=OMvlp8Bt%olz%73g^B%(+)TF=Vc45%f`rD7s5~YmVi-S6l@~XAHK)P}E&)ab`!CK$RlKru@2+QO) zDGp=DkvSaz-^`3J{b8MD`0}0>Y*1l z4a%*uY;K*{G6JlWraJ+*yMt1aU)steF-2;9m^siY&v@g_qukeAUwB(voC_$l`#>`9Pzi%tFwk^J-V9+p3FXjTp+ z^0lppp12Lv)MvqooiU+KxlQ0THv=GGNNF<1Kuh6%u#a@MsY=x!G8a}Ml0O3imCRx_ z?Ns-pm>^^yGOxOHD{#0JxJW=zr%-1NuR;35rL%YUr$ZtPfNy`WUfZ@yrDJmO`XvxB zXS5`X&(N|Lns5VFySnUumx*B!>A;j`R;zr--teL^Qf0kU_ax$L+KxC&^TZ-P_H zFhs_6_nkM#>Y_~mvs#o~!wqa3+-P>)QGx}wtp{nTi#Xeh=r`Og z5X?JHI4pR;m@pfjug(v`X}Yl?0DEx78`5z8^-p8DsP$&jv6O&Mz!HohS1t)yIg&r1 ze9&$$-Pf>9)j%dpr$zmh;#lxss2!Xb5;_SSysBU(KEH6x++GcNLomK>1~^r`X6$z~ z0RFs*3KLD=x%DK-$`ul7{i=YUabBjN`Q-8Rl7XlsF)|M5HJ@RBG@0`;+#1|k>tBB? 
zNacl--%BGPg>w+Fw%eHUw{v!vrPkl`w7=p=CfCI9H?rT24|udb z7?c#8ynOJ6d zVa~>d`O~-U`%cIL4XtR!y^yE0eww6bd}4##_*_iG6hKrjwjzY(!Gh=Kwab4mJX7}0 zHxk`fWj;M;sstkw)DP$cuMLBe8L__MDiw3*&snJ)m;=teQw(n))@dR3i}7-6~@J@4bjH#Ydd0`Y^H=F zISr>25-uT(D%5ZyoX{_-aCaBNxiF(4D6uWwK#K1FxuwYZj4fJ)9oS~r29^2`dCdyj zpYSQMxWRFV?zMchadz@ejCk__Y4XS*gSnAf1%u+uKC!0U0g7;iuoN};vH`nGu5@Wj z-B79p%Ja{~>d4U>_F+`quLDnLfKKZ2^12AZ>-_CHy65_S>+p55Go~q~YL)NrdAliV z*p(|ay8~N)1b8viGoh!b^UuBNUphx`ks`DYXT*r)4WxsAq4luQE>iPV+?$QMYpFzd zF`#Kpe(tR5X-2h~n>?>S#!fMf zo?Oze5V>zA3U0GoK(!enM39B0mQ70pLHQCsx-ivP93!%IT12h2szgnmH`{znkjGAk zY|L~)Rm>8X5V7wssHqpZrK2Zx znRTVoKw3Vb10*hWWu_h$g{@(>1Y2$3$d5+?3oES>XV~m2dLEUu#22R~7PZ~ay@U7Q zc4IODU3HnpEnIc0;Yev$YAk5}E~jx+_jY>h4M$b7`Saf8NVkIUGxPn zOHRgYg7i2neZ0c{L7eKiQSz!59!INxe?AVe;ejr5FJ=$nL{QlEN)v5NBq1(jAf2!o zlkibzN1wPtv>%bYN5qz{FGpqxbT3vOt}H<7NA7qyp|ck;a>fz&;kZX}q#$|z0`s9+ zjRyxo@u;Z6JeWL>bHXfK*sg8dLb!_wiWDW(-$Co^>|fXu&X#6GoJ)PTSQB<&^XwO5 z_`Ct>OzPIgS}969*`uu>^;|<*rW->2q)(j`{Xr)*eg7jB>k6VECwFLPdI@S}`@$-U zd>U5yJ+86F;LO&=J!jcAMloQQ=wnkk>`Us=tkm86Yrdw&_4mC?z1MT7c+U#E?OtkN z=*Gz*7eaKEHp6)1wIW33hg{CcQMF3aKJ^b59JF;vC=GSp*K4kCbQXWGp$P27JfwB^hd)Su!sNA7Q8^T?5VEVY?KB6(B)kNp z7g6C&oY2pz62Q+JU!Qgl1#{3z>5e-bo`xU&tmjxSrqF$^<<7DR4?{aBo~S(E!b)g# zXqo8>Rl@8ZKWN*)!%=mOh4l}kDr*q*#yxANuJ!vkepPzlfEUzM4vl>z;sTB z-p3_(*}|Jj9dbOhwMjSnFT6RoaLxeMz*YVOp|5UR>F|tV0s&&f>3tXRfiSlDY9Hir z{l0@RHJD01;bdy>K2vkq(~eiKr(3?LL_{ritD|%uasmZ}4$bbo@b-3g&74lJl*uSz+$J9ZWYbpudD4cL5i@$ajo;=O!Sv!8a^) zVGYS;sS|UaGCYsRK!Y7eL}Py7pt`iJpsZhM$%h9AoaKLEGn^_fm~U6YCQ&Y@PL1o_ z(L~uIALOuy4iMN&UFH;syuEiKVzh1}er_T^IcZua?hpT%=|*9oW2DT|g^gMWm+S~- zI^1uet81z|+b$ZZ?^x1Far$+OKblV<5j@%zKjqt3vI!(ej~&IThCfuj4>k;PlL?38 zrbHF+Vhv)5FYoB2W%Uvh(M6iNj8UFA=C}Uh4vZDo;W>fAd>$m<>3$@a!2 z`wJ~cmGxf@4k!69VfE>#I&;fUf#I-x9ZIwQk!atI{v#Fn!ls8dC^n3%b5Y&Q3C=kD#tM?8ZYL3xAbhfN=P?x1OAmr8xcp zy5dxIzaICUqWGM)-u89Q? zT*$?5GEC5#jj}+yJi?(O%VDi;w>4H~HU_)vSMfB3{65Wr&bgy}=I+Dg+bV&V z^@&r0x5>yoJ*5K&j{D(nWP3$(#Dsp|{T}Ywt1~!5tq=A0_qdXD^PW*B_PDbiz)>Iv zqcbCe=EDc6fuHVxnT%&;D_z-Jw9d*5eJ1iYH+ygW(w!q*sCpXZl{NDRzGhch_&AVT+9E4J`KpwHYa+9udw1cn^n!j*DaE9u# zJ)g{byH<^Mz~+MenVJEV-s0giXN)GYUPX>3X8js0d7PI8tRJYAx0-#TxskBHUjNfb znd{+?ae`g9ScLJlmx_l0i}NSn%dS6q^VGc`xp!Ddb)r@})ai$Kr2 z6lIsYPI=wpS;VgKIOX(?RW76~)Rkf4?CRC0nRt5w$bw3Ln)vIcj_%V%_v95YSi6@Z za@OvMZ5?k-wWDEl6iU^KI3eA}7UEl#sM{%_YkY~Q`^g|35m-xnWkwg{vBVGbsZJGV z@%yk18UpAncjZ{RNjzF)jOy0IxW%Je!*g><$ZNWSwS6E|3Mw`quBeZS>^8;>DI+Y2 z0*@T&0zaNn#joM91KDf)OgMpS>N!*wPxx)6Ty_#kIGgsY@ji&9dRkp@(kk8D2C1Jc zFdRCYI;(%C1`)p6XW!r!qLI3>+trYqa20w1gV@TpyxPn|G_&f|Xk`l3~#Y#(1F;EA(va z@{?HC*p!QlH8PwrATA?on?A}yO;AKBVVMZh{_vl*oVE1I*|LnnEhFth)76d9c4Hj! z`rN%^B7RLrezyh(gxKo=;ea4olq?6`d%5#%d_iX!2}Y){DmW%dXnw0?c=E2BlN+yEfqT>9s8&m2s+=}3wOX!`;$Vh+fQwsvJ zuHe&xOU1)tCu3OTLZDHi{D}_rsjaoZ~Ar8o?O`fBCdP?8Rl|17gnt*SS^+;-$_#?@=Dmdoq>zRJZ9` z{#brcy{kZTb43y8(|Z~3K9;tudu|}Y9*vMoM$Ix1koiY{@u;Yx`g;qYgUft)&^7if zEGYD0Umv(nSFm-~^J5Tr(fW1?sfyEgmlG{yGq-T;6vzD9NLna71>$8acz;x&v7Y-a zs~~@Dp&lAhUh30$ch zEFf^Y=7Y{Iuxo7{tFCOgn4_}a7;>liaz%5{8M_|gBO0^vUGt6Zq>s<=6cNuLo>jKXZLO81VXe)aw_AUcXFx zJ#^~zaMkOP=GU(}UXMP1{d)5Co1d?xZ{csol;4aSym@E)=Dq8iiGVkgQExsRdh;>u z&7{_7iTL7F^PA5dZwi`CDrf8HJ2u=N**Q%-7k;q z&x@JUAt%2-x6jMD91s_5KPLlMKLbU+r`W7Vr(OQ8O8qZyi@mMK@m3eY5vCZK1p)IiCn67+B`?C6`G#{`rWf$`2B<9qLv-`%ubdB+w&Wl)ec z&?g)gi4RgzO=##wbxP{f5lE%r(r;-NnKt!lv}?Jav-3%Xma>@!B! 
zN4TAgkY7Fd73uVU0<9Nk4WU2eCJm$p!ZcXc!C$lX0hU@O89|S%?QaWkAka%w!J%-_ zHF`6f0Lp=8yxD#|XDM=BcvG$bq<09NtnwIdJ)jC0o9bq&)HJ6Op7HT7M~|XpgdlW1 zD8uf_aZT5}KYj?1^*V2~R3lm*U*dim)r9;4T4f)vGAJloHwolu=K>JKLx7uN)rPK> z$AInhw2Zj|jfDQxB&bXvx;3FO%LhbH7i778JXx)wGZw=U?BNhm^l1IFvzsedA5Q(7 zg?3N)pjRBbXD@bmr3;!EzlT? zZn%+g^2OSM{Wp&OGQd&feGq(%fzaia3~O&S>B#Lgohw2RCI;E;6cKtGiPB+ zMbA>17dFE(_TjaY7WkX}*}LpiK?Krc1X>aNKrTsnUtb{yU)TZ!oEzI1W8O5+eXk6{r13)^71+uS6KiEbMd8`^Yj>y*)oG#A1q(ZNB z*w4Xp*da>7#pWJVJapV{T~v zjF6Vl_nk9KuB^Y0KM)XbYn;v0)7O!CeE2dZTG3mIyx1$Wo*5pzq=>!5wx_@K8|&v! zaSsknvdNb})Qhc5jpG?y3(5dzfqhO?SiJpAOO3l>njnVZ6&2y5MCsy0?-Q`D4Xq$= zmKUcAI#KC}9r$n6EBwRjPvtWWuP>(6UCKy4wD}8@XF(oi<1OY)SCMjppSM&MrN|c( zFX}roBAjh=648O(aAVLsA64AYaw=|#Z?#9Lb_<3f*(wztmeI%HOS`4HpfC=3p8%zY z&o|5)$sE3ljwJYL4YNTRRk`&g!9EDvUFUTBPp48q3h5~P+`xeq^}1=&es{)3Zv%`W;cF^m>8a+8-a-y~JL4>HdP@2{q(VoH(t7WfUwU79OQY*V+%lslt~sv7t$EvtWaFX?2uPwSE6&rdEQ zQ}6=A$J3LfDjj%62vEXd&22QyJTZLuES}AdhS3sFCa-_Y_n77j)ps68eYS*FS3k?G z;R4a9$xmq;D?ewIYyPd{!Iz-Z7AU21vh z=h{l!lI^D7nsXmuMl4C07dm$%3U)hC^VrmK8n|*MynnKe?{2;$e5;?5#1e@4xv(a) z+&J^0)qrMDeRk(HhIIUj8uI;p8Tc_XYt)R+3#mVLS4Y0P1@x`Sb z@hRYA<=<298pS5bYizdq3@_e$F_3$#O^$YSPfApp` z@_>v}P$2AYBHR_pK0Y5)xYHA^PCAd4acc^evz`*>C)vi=_r)QIA4OE`Oeax5JnYUB zC_MHF_tsyb!4%cK@%>pOx5{ugOLO{#L-b|gz)2ay3U-ds_yQwKU2b?d5B@Wnk>~b; zC&`~Ph$)lz#;mW%#=;k#s>pzSc=A%`J3_G{#RIQ^r2teIHU=^fMYRQ$(}YxWpT092 zdKnA_SJ3$A>Ft;er1G?p=8Gar#A07X^gLzFsdRN-O{MwPTR_qo`3+)cCsWCGDioFn z*#m8I&4X=FZW23NCOzy6hIRjVS_elDJn|};zGZUl?$VM;eH-Si0aZkfMAOo6l8w|I zc0Yv50r<%0$}0Fo6wRp#=vQw~{$tiMo@sC1I$5ocvcY=)im~0ev2kl4uE)|}AKo9v z+3-sR#t%8x02Et$*|M9M zSJZ&Cm{b@>1{G@idJScJDGw?%BQ1v0t&9$lK(!e)81Ii61US!S~jG zEIYQceyTjnX~Uj9oX!*WUt2XU9i`s+ykbAwCgbcd z*OB^a!{;clDVMLG|#|)QiSy3S&_7N##oPi?DIpxiJFiZ~Nh{;BLjW zxb+&lCGf2q3fZAGM#?X04E32HbOk*RO_~e#!Q+5;Zj!X#FFHG-<@=TkjC5vL#GGz{ zl~o7yrcY&-2le!m!(Kxh4n4MsBx39dT+BKI(N}7n#y6Tajofm|-Cd zKfOTm#+tNRC_zz$8LBGp($P~a#1z;6hUq6LF=b50ebN0aFVwBNVGh1+;lbh9dZ^ot zDc1u($}qb{-BMlki37)N1d8K|fA2aK0;MQ-%MwSU~#s^mZB5W4Janeu)z`I`kWBz$3b01?dsVQ@U)^_brQC&uIK z6zN#8G>N5oiMsX53JrcRZO9MG;>bk_h~LKCWy)z4iywdEDHgXWq2r%KWn|;bMaQ`xbH{OAmMpNK6J?sPzfC%Op#I7 z3G-kbW~fUTSd1lgu&v_sG(L%6fE!CZH?)SLKY(Tu#1|l)zVx8NoM(vqU}5_Y`JxlY za9F7i4-pcbo%oKbOjIaMwF`w*g%nYPf`PP=K?CSex?3G2?~(rpomZevs9Zuu>exG- z6dTkY!!Q6Yy}(mACNEhj&nd%C)TtMvTyd-Ji)H9G+%V$CyND~<^ydWWXpJgS+a-q+ zSwSG!2nZ`a%^YR~4o~v1YrLS4!Hy+9w9Ag7bpR@4*k)NHCbh{2=ZbG3d=rCd$<&E# zd{%gBQ9U(LQz2e9iR*}mE@>yo*22Pw;1ej5l~sLu4HMQ~?J<;n7kVa;rfOT9hJ4Uy z$xUSKkx^@Ne#1Z>j+Q)uht+omwXF`y(ZST&*R-HM4ucXY0nlWtUQwQ$M;x)EMVE#P z5!$^Cj77;mkV_%&2{It|ATqM3k_O>s4TofW!)Q(M|`TI zzEes#q=rfbjd^XrUYvgFo&YgaZ+uVALXiBy zV%43JyF!mqmC=%Of!f*$A2@d=*W3L;uEh+9G`?2e)4uT;>oT=C$8NUVr2z zY$GUcg3*KiVv|tC#Bs9V@ z*=DD3znQKs#zb&J&LsyYZHK5iJaDm!79PS^jwL0bbXM|yAD%7Of?Ga7U-`sMtf8*g zY0h$BG~f!2qGWVt)zu!{&N|Rid2MA8IVqJH*oUI1YJ~@PKeG0h?^KPv8oR6enaO_O zjB=!GXhAU5Ne^+W%Me99DlCKc^g!#3R6p=dRNVvT7AoZH zrh-V1H4>NfNa!{YEtS+|5-IhfPI-F{o~^A?191E|pWc%38gX7q`h`Qz-C#G#knhoOq;hnTun<%geZH0BBG1PHJhpGr5?Gd!Ral){CTp z`q;tC3@U8YB~+4zr$YwCN`>u8-7kJTe&rH6?%Z;oaA!QHPmB@JG-`I!|wH?zZ*+hco+$aZI`*2_|R+YZ4o9)O91Xc4^J4`NjRCQLkwnOUv{Y1m%v+b_!&Lq603vAsWsN4~{E)h&C zVnqcg*~6qD>m7s+(j({IG$$vz500m~1d_{ccT}ePj#Awg)LeYM!jejJg$^XbW#Uj@Ov2a8z>#wUV<2C$VyP z>+D~uPdFRqD8Wb3Ki}z$^C0%oxr?p()&d00gG&X0eaasYc{}L(T(y?AD1Yz@xYAAk z?Lz^9c4rfnBHpOXJLv;bASrj@>1$V@9(c(wGYJv}zZysl_7_zE% zka?3Glg@WIWKwsKR(tl}cF$@Uga_FuSWV~}3XTSYmWJm3(hQhz8yr+E+R%WcY8=Hs zQRiYI>yjj&FVhWdYjmgj%b{vGV0DR7?*m**2dQWm=?5n&q*MORPU@Xe=TW~~rJ+=o zi$$c1rH>2Ukv{J#(opA_Y-Z_KK2kE{rR4M*Tf%hu#9c<-#~CmJiBFCLV|dYfCd3Tc 
zHU?A6b>0g*w2p)#=55+jmZR>hr`dcd+l%im5}=DVJ`sGbtv4#4lKB&sC>C z`rDorBRNZ@gHUetg+XVg);1D9!7Ij7km=UV0xxHQ88A`nkb#Qi(kvJ)jka;}ALJY@z4o;~CTy#(?~nSR-ccfC=rX{YCETllc!>$xHx|eqL?uza$@|)VHFE8|gu%1-uex2qoXG&l0)5c3K zGxCp&=g+UgYU{S~)Se$)s9yW7|A>e(+mrjmX*1F6`DO9LLCqr$qLLunJN^-jE(-N8nY^s`oyVKo?G z#P!06+2mC@BS5{eWc!#UOcE_^-!n*ODI>1#3_`4M{Yx}mDKA|BuHh~EheMraaqUK1 z&A)6uS;iSitOH#*I)=$Mh<1>eH>x&goXKC^1s${2oyO6+W#5m#+ah{S^O_ivcZF&1 zTxDae#Y1j%KP#@}roAv%>b z?G@;eGz*l*V~mhZ>U4$fjdoT0Le1y|H5k^Ln(8YNul4cdZHt6uSX#qe zZZul+v%Wc+b|vd+7epm#&{8hC^|5{5bl6_xJ?evQQI)7p?V>WJ)t{b= z3>8a$s7M^hS(*+3M*?}@rw;}EVIf_(roGg) z()(Df&j?p$2fn$dco_<_8#jN>i|FCszV%yA=`t**5AstJoCk~^ZRu1K@~;JXg3hh{ zh`l`AX3V3)008m{i%zpcd(US;Kj%na=c=(`x;EtcGLKKiC1$UFwAWo5`!TX+M5%w>WtfGZs+N{}0TVvi_~BtC+D< z-A@7f)`(}9Y;W;SOM+Ho<^KO=2XOv@8T-%dfcr&>|6m7vj6GW@y=JmsTJ(HZJX}to zpl!dSs-24Xda9kf-lpcv7M!Qxr7S)?dOSoewk0e?CVe3+gi67W3l1J4NaUQ#TU5+W zz-GC8^Xkbiu(e9y`#|;Hma&sDSB7+jF|V1>yuDf(6O?~;vm1k)L3)t)|Nggl)Q{cQ z{eP#w$=1HtVdeezNQYH7JMp2%|Kw)>&-Azd66tVNtNQvr__7#bGE*;6 z**nvqek*gPv3Kp~nd^OSCSRHcw{9ZJmWACeQQJ$RyV723f6Q@z5V(qTcoE)MLYTVo z`Rnc3&i_56gQd6ezakxG=DPk%`rGxJ{=GKHwVxaQ`S#rgFQ8xsq<^0gXWz=ILZV>hy6@j+m=~I(h~sJbTpgT|_d;9wyAE8au`SAl yAZvhj`RY*H6#rBy%NZhQNSs3y+O2_gPBCWBW7+?_(hiXNf2@=b`#)K!{r>|k+1F?Q diff --git a/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-with-auto-invoke.png b/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-with-auto-invoke.png deleted file mode 100644 index 27914072b2..0000000000 --- a/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-with-auto-invoke.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f0d8e2ab194e31dc70e39ba081a755c8e792d291bef4dc8b4c5cc372bed9ec50 -size 215389 diff --git a/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-without-auto-invoke.png b/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-without-auto-invoke.png deleted file mode 100644 index a0711e505e..0000000000 --- a/dotnet/website/images/articles/CreateAgentWithTools/single-turn-tool-call-without-auto-invoke.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5f2e632fb24641eb2fac7fff995c9b3213023c45c3238531eec5a340072865f6 -size 202768 diff --git a/dotnet/website/images/articles/CreateUserProxyAgent/image-1.png b/dotnet/website/images/articles/CreateUserProxyAgent/image-1.png deleted file mode 100644 index fd467c44af..0000000000 --- a/dotnet/website/images/articles/CreateUserProxyAgent/image-1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:91813a034edc3918a27758296d77150d1c8d650911847bdc6a42cca79307714a -size 9009 diff --git a/dotnet/website/images/articles/DynamicGroupChat/dynamicChat.gif b/dotnet/website/images/articles/DynamicGroupChat/dynamicChat.gif deleted file mode 100644 index d756f67411..0000000000 --- a/dotnet/website/images/articles/DynamicGroupChat/dynamicChat.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5cba3069e9669a1b8013f0b2fa4d191c1d7b0b7919b1664f1f8ec98a90c7a2b2 -size 411517 diff --git a/dotnet/website/images/articles/PrintMessageMiddleware/printMessage.png b/dotnet/website/images/articles/PrintMessageMiddleware/printMessage.png deleted file mode 100644 index db31ade0de..0000000000 --- a/dotnet/website/images/articles/PrintMessageMiddleware/printMessage.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:7ec3bc40d4e3c1228d5799e448a34521998e7abb700bc978afc790389805ecb4 -size 86924 diff --git a/dotnet/website/images/articles/PrintMessageMiddleware/streamingoutput.gif b/dotnet/website/images/articles/PrintMessageMiddleware/streamingoutput.gif deleted file mode 100644 index a2afd4f584..0000000000 --- a/dotnet/website/images/articles/PrintMessageMiddleware/streamingoutput.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:95feb667fe74177506435ca52fcf183fb187a3a407fac0b3b220bd9e8da721c7 -size 547023 diff --git a/dotnet/website/images/articles/SequentialGroupChat/SearcherSummarizer.gif b/dotnet/website/images/articles/SequentialGroupChat/SearcherSummarizer.gif deleted file mode 100644 index 250bf00b8d..0000000000 --- a/dotnet/website/images/articles/SequentialGroupChat/SearcherSummarizer.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c6d8a5a534efaf49ecc796ad3ca8e62fb7a236b55d894bda7a0c258564195b5d -size 620269 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsA.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsA.png deleted file mode 100644 index 0403a8cf97..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsA.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:491f8f538c55ce8768179cabfd3789c71c4a07b7d809f85deba9b8f4b759c00e -size 42329 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsB.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsB.png deleted file mode 100644 index 03a68735c0..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsB.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e319fad11682c46c3dc511e2fc63e033f3f99efb06d4530e7f72d1f4af23848f -size 31528 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsC.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsC.png deleted file mode 100644 index 7326ad14d0..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/FinalStepsC.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a8024b5336615e8c2c3497df7a5890a331bd5bdc7b15dd06abd7ec528ffe0932 -size 70169 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.2OpenAIModel.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.2OpenAIModel.png deleted file mode 100644 index b2b7481bbe..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.2OpenAIModel.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:911f2f7c1ab4f9403386298d9769243c0aa8cc22c6f119342cc107a654d1463a -size 44041 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.3ModelNameAndURL.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.3ModelNameAndURL.png deleted file mode 100644 index d1c19f3008..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step5.3ModelNameAndURL.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ec10a48ed3f0a6d8448e0ce425658f3857c2cf89e2badef8a8d3a8c3744fc3bf -size 51944 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6.png deleted file mode 100644 
index 67c7344544..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f016faea51f64af3970fde41ac95249c4e0423b02573f058c36dc1e6ba15562d -size 50669 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6b.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6b.png deleted file mode 100644 index ebd19bff04..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Step6b.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4a23cbbf5d3d24eaf1da9370e0914f186815f2ecbf46131d2fd6eb5ff3264d96 -size 22569 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Terminal.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Terminal.png deleted file mode 100644 index 9edefc3aeb..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/Terminal.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:97328776c25fd0a61c76065db379406d8d3c96bd8773490c34c168cd7c69a855 -size 58527 diff --git a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/TheModelTab.png b/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/TheModelTab.png deleted file mode 100644 index 55e7bd8626..0000000000 --- a/dotnet/website/images/articles/UseAutoGenAsModelinAGStudio/TheModelTab.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1d7f4f3a772278e6de320a3601a76f8a9862cab4a9c0da03fad3058b86fcfaf7 -size 45260 diff --git a/dotnet/website/index.md b/dotnet/website/index.md deleted file mode 100644 index 164e5c1cf8..0000000000 --- a/dotnet/website/index.md +++ /dev/null @@ -1 +0,0 @@ -[!INCLUDE [](./articles/getting-start.md)] \ No newline at end of file diff --git a/dotnet/website/release_note/0.0.16.md b/dotnet/website/release_note/0.0.16.md deleted file mode 100644 index b9a190c5f7..0000000000 --- a/dotnet/website/release_note/0.0.16.md +++ /dev/null @@ -1,32 +0,0 @@ -# AutoGen.Net 0.0.16 Release Notes - -We are excited to announce the release of **AutoGen.Net 0.0.16**. This release includes several new features, bug fixes, improvements, and important updates. Below are the detailed release notes: - -**[Milestone: AutoGen.Net 0.0.16](https://github.com/microsoft/autogen/milestone/4)** - -## πŸ“¦ New Features -1. **Deprecate `IStreamingMessage`** ([#3045](https://github.com/microsoft/autogen/issues/3045)) - Replaced `IStreamingMessage` and `IStreamingMessage<T>` with `IMessage` and `IMessage<T>`. -2. **Add example for using ollama + LiteLLM for function call** ([#3014](https://github.com/microsoft/autogen/issues/3014)) - Added a new tutorial to the website for integrating ollama with LiteLLM for function calls. -3. **Add ReAct sample** ([#2978](https://github.com/microsoft/autogen/issues/2978)) - Added a new sample demonstrating the ReAct pattern. -4. **Support tools for Anthropic Models** ([#2771](https://github.com/microsoft/autogen/issues/2771)) - Introduced tool support for Anthropic models via `AnthropicClient`, `AnthropicClientAgent`, and `AnthropicMessageConnector`. -5. **Propose Orchestrator for managing group chat/agentic workflow** ([#2695](https://github.com/microsoft/autogen/issues/2695)) - Introduced a customizable orchestrator interface for managing group chats and agent workflows. -6. **Run Agent as Web API** ([#2519](https://github.com/microsoft/autogen/issues/2519)) - Introduced the ability to start an OpenAI-chat-compatible web API from an arbitrary agent.
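To illustrate what the `IStreamingMessage` deprecation in feature 1 means in practice, here is a minimal sketch of consuming a streaming reply after the change. It assumes that streaming text chunks arrive as `TextMessageUpdate` and that `GenerateStreamingReplyAsync` takes the chat history as its first argument; both are assumptions about the exact API shape rather than a verbatim sample.

```csharp
using System;
using System.Threading.Tasks;
using AutoGen.Core;

public static class StreamingAfterDeprecation
{
    // Minimal sketch: after the IStreamingMessage deprecation, streaming updates are
    // surfaced as plain IMessage instances (text chunks assumed to be TextMessageUpdate).
    public static async Task PrintStreamingReplyAsync(IStreamingAgent agent)
    {
        var question = new TextMessage(Role.User, "Summarize this release in one sentence.");

        await foreach (IMessage update in agent.GenerateStreamingReplyAsync(new[] { question }))
        {
            if (update is TextMessageUpdate chunk)
            {
                Console.Write(chunk.Content);
            }
        }
    }
}
```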
- -## πŸ› Bug Fixes -1. **SourceGenerator doesn't work when function's arguments are empty** ([#2976](https://github.com/microsoft/autogen/issues/2976)) - Fixed an issue where the SourceGenerator failed when function arguments were empty. -2. **Add content field in ToolCallMessage** ([#2975](https://github.com/microsoft/autogen/issues/2975)) - Added a content property in `ToolCallMessage` to handle text content returned by the OpenAI model during tool calls. -3. **AutoGen.SourceGenerator doesn’t encode `"` in structural comments** ([#2872](https://github.com/microsoft/autogen/issues/2872)) - Fixed an issue where structural comments containing `"` were not properly encoded, leading to compilation errors. - -## πŸš€ Improvements -1. **Sample update - Add getting-start samples for BasicSample project** ([#2859](https://github.com/microsoft/autogen/issues/2859)) - Re-organized the `AutoGen.BasicSample` project to include only essential getting-started examples, simplifying complex examples. -2. **Graph constructor should consider null transitions** ([#2708](https://github.com/microsoft/autogen/issues/2708)) - Updated the Graph constructor to handle cases where transitions’ values are null. - -## ⚠️ API Breaking Changes -1. **Deprecate `IStreamingMessage`** ([#3045](https://github.com/microsoft/autogen/issues/3045)) - **Migration guide:** Deprecating `IStreamingMessage` will introduce breaking changes, particularly for `IStreamingAgent` and `IStreamingMiddleware`. Replace all `IStreamingMessage` and `IStreamingMessage<T>` with `IMessage` and `IMessage<T>`. - -## πŸ“š Document Update -1. **Add example for using ollama + LiteLLM for function call** ([#3014](https://github.com/microsoft/autogen/issues/3014)) - Added a tutorial to the website for using ollama with LiteLLM. - -Thank you to all the contributors for making this release possible. We encourage everyone to upgrade to AutoGen.Net 0.0.16 to take advantage of these new features and improvements. If you encounter any issues or have any feedback, please let us know. - -Happy coding! πŸš€ \ No newline at end of file diff --git a/dotnet/website/release_note/0.0.17.md b/dotnet/website/release_note/0.0.17.md deleted file mode 100644 index ad245191e7..0000000000 --- a/dotnet/website/release_note/0.0.17.md +++ /dev/null @@ -1,45 +0,0 @@ -# AutoGen.Net 0.0.17 Release Notes - -## 🌟 What's New - -1. **.NET Core Target Framework Support** ([#3203](https://github.com/microsoft/autogen/issues/3203)) - - πŸš€ Added support for .NET Core to ensure compatibility and enhanced performance of AutoGen packages across different platforms. - -2. **Kernel Support in Interactive Service Constructor** ([#3181](https://github.com/microsoft/autogen/issues/3181)) - - 🧠 Enhanced the Interactive Service to accept a kernel in its constructor, facilitating usage in notebook environments. - -3. **Constructor Options for OpenAIChatAgent** ([#3126](https://github.com/microsoft/autogen/issues/3126)) - - βš™οΈ Added new constructor options for `OpenAIChatAgent` to allow full control over chat completion flags/options. - -4. **Step-by-Step Execution for Group Chat** ([#3075](https://github.com/microsoft/autogen/issues/3075)) - - πŸ› οΈ Introduced an `IAsyncEnumerable` extension API to run group chat step-by-step, enabling developers to observe internal processes or implement early stopping mechanisms.
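A minimal sketch of how this step-by-step API might be consumed is shown below. It assumes the extension is exposed as `GroupChatExtension.SendAsync`, that it yields `IMessage` instances, and that a `maxRound` parameter exists; the exact method name, parameters, and termination convention may differ from the shipped API, so treat it as an illustration only.

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AutoGen.Core;

public static class StepByStepGroupChat
{
    // Minimal sketch: drive a group chat one message at a time and stop early.
    // `groupChat` is an already-configured IGroupChat; agent setup is omitted here.
    public static async Task RunAsync(IGroupChat groupChat, IMessage task)
    {
        var history = new List<IMessage> { task };

        // Assumed shape: a GroupChatExtension.SendAsync overload that yields IMessage instances.
        await foreach (var message in groupChat.SendAsync(history, maxRound: 10))
        {
            Console.WriteLine(message.FormatMessage());

            // Early stopping: break out as soon as any agent signals completion.
            if (message.GetContent()?.Contains("TERMINATE") is true)
            {
                break;
            }
        }
    }
}
```

Driving the loop from the caller makes it easy to log intermediate messages or stop early without changing the group chat itself.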
- -## πŸš€ Improvements - -1. **Cancellation Token Addition in Graph APIs** ([#3111](https://github.com/microsoft/autogen/issues/3111)) - - πŸ”„ Added cancellation tokens to async APIs in the `AutoGen.Core.Graph` class to follow best practices and enhance the control flow. - -## ⚠️ API Breaking Changes - -1. **FunctionDefinition Generation Stopped in Source Generator** ([#3133](https://github.com/microsoft/autogen/issues/3133)) - - πŸ›‘ Stopped generating `FunctionDefinition` from `Azure.AI.OpenAI` in the source generator to eliminate unnecessary package dependencies. Migration guide: - - ➑️ Use `ToOpenAIFunctionDefinition()` extension from `AutoGen.OpenAI` for generating `FunctionDefinition` from `AutoGen.Core.FunctionContract`. - - ➑️ Use `FunctionContract` for metadata such as function name or parameters. - -2. **Namespace Renaming for AutoGen.WebAPI** ([#3152](https://github.com/microsoft/autogen/issues/3152)) - - ✏️ Renamed the namespace of `AutoGen.WebAPI` from `AutoGen.Service` to `AutoGen.WebAPI` to maintain consistency with the project name. - -3. **Semantic Kernel Version Update** ([#3118](https://github.com/microsoft/autogen/issues/3118)) - - πŸ“ˆ Upgraded the Semantic Kernel version to 1.15.1 for enhanced functionality and performance improvements. This might introduce a breaking change for those who use a lower version of Semantic Kernel. - -## πŸ“š Documentation - -1. **Consume AutoGen.Net Agent in AG Studio** ([#3142](https://github.com/microsoft/autogen/issues/3142)) - - Added detailed documentation on using AutoGen.Net Agent as a model in AG Studio, including examples of starting an OpenAI chat backend and integrating third-party OpenAI models. - -2. **Middleware Overview Documentation Errors Fixed** ([#3129](https://github.com/microsoft/autogen/issues/3129)) - - Corrected logic and compile errors in the example code provided in the Middleware Overview documentation to ensure it runs without issues. - ---- - -We hope you enjoy the new features and improvements in AutoGen.Net 0.0.17! If you encounter any issues or have feedback, please open a new issue on our [GitHub repository](https://github.com/microsoft/autogen/issues). \ No newline at end of file diff --git a/dotnet/website/release_note/0.1.0.md b/dotnet/website/release_note/0.1.0.md deleted file mode 100644 index dc84408775..0000000000 --- a/dotnet/website/release_note/0.1.0.md +++ /dev/null @@ -1,41 +0,0 @@ -# πŸŽ‰ Release Notes: AutoGen.Net 0.1.0 πŸŽ‰ - -## πŸ“¦ New Packages - -1. **Add AutoGen.AzureAIInference Package** - - **Issue**: [.Net][Feature Request] [#3323](https://github.com/microsoft/autogen/issues/3323) - - **Description**: The new `AutoGen.AzureAIInference` package includes the `ChatCompletionClientAgent`. - -## ✨ New Features - -1. **Enable Step-by-Step Execution for Two Agent Chat API** - - **Issue**: [.Net][Feature Request] [#3339](https://github.com/microsoft/autogen/issues/3339) - - **Description**: The `AgentExtension.SendAsync` now returns an `IAsyncEnumerable`, allowing conversations to be driven step by step, similar to how `GroupChatExtension.SendAsync` works. - -2. **Support Python Code Execution in AutoGen.DotnetInteractive** - - **Issue**: [.Net][Feature Request] [#3316](https://github.com/microsoft/autogen/issues/3316) - - **Description**: `dotnet-interactive` now supports Jupyter kernel connection, allowing Python code execution in `AutoGen.DotnetInteractive`. - -3.
**Support Prompt Cache in Claude** - - **Issue**: [.Net][Feature Request] [#3359](https://github.com/microsoft/autogen/issues/3359) - - **Description**: Claude now supports prompt caching, which dramatically lowers the bill if the cache is hit. Added the corresponding option in the Claude client. - -## πŸ› Bug Fixes - -1. **GroupChatExtension.SendAsync Doesn’t Terminate Chat When `IOrchestrator` Returns Null as Next Agent** - - **Issue**: [.Net][Bug] [#3306](https://github.com/microsoft/autogen/issues/3306) - - **Description**: Fixed an issue where `GroupChatExtension.SendAsync` would continue until the max_round is reached even when `IOrchestrator` returns null as the next speaker. - -2. **InitializedMessages Are Added Repeatedly in GroupChatExtension.SendAsync Method** - - **Issue**: [.Net][Bug] [#3268](https://github.com/microsoft/autogen/issues/3268) - - **Description**: Fixed an issue where initialized messages from group chat were being added repeatedly in every iteration of the `GroupChatExtension.SendAsync` API. - -3. **Remove `Azure.AI.OpenAI` Dependency from `AutoGen.DotnetInteractive`** - - **Issue**: [.Net][Feature Request] [#3273](https://github.com/microsoft/autogen/issues/3273) - - **Description**: Fixed an issue by removing the `Azure.AI.OpenAI` dependency from `AutoGen.DotnetInteractive`, simplifying the package and reducing dependencies. - -## πŸ“„ Documentation Updates - -1. **Add Function Comparison Page Between Python AutoGen and AutoGen.Net** - - **Issue**: [.Net][Document] [#3184](https://github.com/microsoft/autogen/issues/3184) - - **Description**: Added comparative documentation for features between AutoGen and AutoGen.Net across various functionalities and platform supports. \ No newline at end of file diff --git a/dotnet/website/release_note/toc.yml b/dotnet/website/release_note/toc.yml deleted file mode 100644 index 9c8008e705..0000000000 --- a/dotnet/website/release_note/toc.yml +++ /dev/null @@ -1,11 +0,0 @@ -- name: 0.1.0 - href: 0.1.0.md - -- name: 0.0.17 - href: 0.0.17.md - -- name: 0.0.16 - href: 0.0.16.md - -- name: 0.0.0 - 0.0.15 - href: update.md \ No newline at end of file diff --git a/dotnet/website/release_note/update.md b/dotnet/website/release_note/update.md deleted file mode 100644 index 7c81130ed7..0000000000 --- a/dotnet/website/release_note/update.md +++ /dev/null @@ -1,77 +0,0 @@ -##### Update on 0.0.15 (2024-06-13) Milestone: [AutoGen.Net 0.0.15](https://github.com/microsoft/autogen/milestone/3) - -###### Highlights -- [Issue 2851](https://github.com/microsoft/autogen/issues/2851) `AutoGen.Gemini` package for Gemini support. Examples can be found [here](https://github.com/microsoft/autogen/tree/main/dotnet/sample/AutoGen.Gemini.Sample) - -##### Update on 0.0.14 (2024-05-28) -###### New features -- [Issue 2319](https://github.com/microsoft/autogen/issues/2319) Add `AutoGen.Ollama` package for Ollama support. Special thanks to @iddelacruz for the effort. -- [Issue 2608](https://github.com/microsoft/autogen/issues/2608) Add `AutoGen.Anthropic` package for Anthropic support. Special thanks to @DavidLuong98 for the effort. -- [Issue 2647](https://github.com/microsoft/autogen/issues/2647) Add `ToolCallAggregateMessage` for function call middleware. - -###### API Breaking Changes -- [Issue 2648](https://github.com/microsoft/autogen/issues/2648) Deprecate `Message` type. -- [Issue 2649](https://github.com/microsoft/autogen/issues/2649) Deprecate `Workflow` type. 
-###### Bug Fixes -- [Issue 2735](https://github.com/microsoft/autogen/issues/2735) Fix tool call issue in AutoGen.Mistral package. -- [Issue 2722](https://github.com/microsoft/autogen/issues/2722) Fix parallel function call in function call middleware. -- [Issue 2633](https://github.com/microsoft/autogen/issues/2633) Set up `name` field in `OpenAIChatMessageConnector` -- [Issue 2660](https://github.com/microsoft/autogen/issues/2660) Fix dotnet interactive restoring issue when system language is Chinese -- [Issue 2687](https://github.com/microsoft/autogen/issues/2687) Add `global::` prefix to generated code to avoid conflict with user-defined types. -##### Update on 0.0.13 (2024-05-09) -###### New features -- [Issue 2593](https://github.com/microsoft/autogen/issues/2593) Consume SK plugins in Agent. -- [Issue 1893](https://github.com/microsoft/autogen/issues/1893) Support inline-data in ImageMessage -- [Issue 2481](https://github.com/microsoft/autogen/issues/2481) Introduce `ChatCompletionAgent` to `AutoGen.SemanticKernel` -###### API Breaking Changes -- [Issue 2470](https://github.com/microsoft/autogen/issues/2470) Update the return type of `IStreamingAgent.GenerateStreamingReplyAsync` from `Task<IAsyncEnumerable<IStreamingMessage>>` to `IAsyncEnumerable<IStreamingMessage>` -- [Issue 2470](https://github.com/microsoft/autogen/issues/2470) Update the return type of `IStreamingMiddleware.InvokeAsync` from `Task<IAsyncEnumerable<IStreamingMessage>>` to `IAsyncEnumerable<IStreamingMessage>` -- Mark `RegisterReply`, `RegisterPreProcess` and `RegisterPostProcess` as obsolete. You can replace them with `RegisterMiddleware` - -###### Bug Fixes -- Fix [Issue 2609](https://github.com/microsoft/autogen/issues/2609) Constructor of conversableAgentConfig does not accept LMStudioConfig as ConfigList - -##### Update on 0.0.12 (2024-04-22) -- Add AutoGen.Mistral package to support Mistral.AI models -##### Update on 0.0.11 (2024-04-10) -- Add link to Discord channel in nuget's readme.md -- Document improvements -- In `AutoGen.OpenAI`, update `Azure.AI.OpenAI` to 1.0.0-beta.15 and add support for json mode and deterministic output in `OpenAIChatAgent` [Issue #2346](https://github.com/microsoft/autogen/issues/2346) -- In `AutoGen.SemanticKernel`, update `SemanticKernel` package to 1.7.1 -- [API Breaking Change] Rename `PrintMessageMiddlewareExtension.RegisterPrintFormatMessageHook` to `PrintMessageMiddlewareExtension.RegisterPrintMessage`. -##### Update on 0.0.10 (2024-03-12) -- Rename `Workflow` to `Graph` -- Rename `AddInitializeMessage` to `SendIntroduction` -- Rename `SequentialGroupChat` to `RoundRobinGroupChat` -##### Update on 0.0.9 (2024-03-02) -- Refactor @AutoGen.Message and introduce `TextMessage`, `ImageMessage`, `MultiModalMessage` and so on. PR [#1676](https://github.com/microsoft/autogen/pull/1676) -- Add `AutoGen.SemanticKernel` to support seamless integration with Semantic Kernel -- Move the agent contract abstraction to `AutoGen.Core` package. The `AutoGen.Core` package provides the abstraction for message type, agent and group chat and doesn't contain dependencies over `Azure.AI.OpenAI` or `Semantic Kernel`. This is useful when you want to leverage AutoGen's abstraction only and want to avoid introducing any other dependencies.
-- Move `GPTAgent`, `OpenAIChatAgent` and all openai-dependencies to `AutoGen.OpenAI`
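As a rough illustration of the message types introduced in 0.0.9, the sketch below constructs a text, an image, and a multi-modal message. The constructor overloads shown and the image URL are assumptions for illustration only; check the current `AutoGen.Core` API for the exact signatures.

```csharp
using System;
using AutoGen.Core;

// Minimal sketch of the built-in message types introduced in 0.0.9.
// Constructor overloads may differ slightly; the image URL is a placeholder.
var text = new TextMessage(Role.User, "What is in this picture?");
var image = new ImageMessage(Role.User, "https://example.com/cat.png");
var multiModal = new MultiModalMessage(Role.User, new IMessage[] { text, image });

Console.WriteLine(multiModal);
```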
-##### Update on 0.0.8 (2024-02-28) -- Fix [#1804](https://github.com/microsoft/autogen/pull/1804) -- Streaming support for IAgent [#1656](https://github.com/microsoft/autogen/pull/1656) -- Streaming support for middleware via `MiddlewareStreamingAgent` [#1656](https://github.com/microsoft/autogen/pull/1656) -- Graph chat support with conditional transition workflow [#1761](https://github.com/microsoft/autogen/pull/1761) -- AutoGen.SourceGenerator: Generate `FunctionContract` from `FunctionAttribute` [#1736](https://github.com/microsoft/autogen/pull/1736) -##### Update on 0.0.7 (2024-02-11) -- Add `AutoGen.LMStudio` to support consuming an openai-like API from the LMStudio local server -##### Update on 0.0.6 (2024-01-23) -- Add `MiddlewareAgent` -- Use `MiddlewareAgent` to implement existing agent hooks (RegisterPreProcess, RegisterPostProcess, RegisterReply) -- Remove `AutoReplyAgent`, `PreProcessAgent`, `PostProcessAgent` because they are replaced by `MiddlewareAgent` -##### Update on 0.0.5 -- Simplify `IAgent` interface by removing `ChatLLM` Property -- Add `GenerateReplyOptions` to `IAgent.GenerateReplyAsync` which allows the user to specify or override the options when generating a reply - -##### Update on 0.0.4 -- Move out dependency of Semantic Kernel -- Add type `IChatLLM` as connector to LLM - -##### Update on 0.0.3 -- In AutoGen.SourceGenerator, rename FunctionAttribution to FunctionAttribute -- In AutoGen, refactor over ConversationAgent, UserProxyAgent, and AssistantAgent - -##### Update on 0.0.2 -- update Azure.AI.OpenAI to 1.0.0-beta.12 -- update Semantic Kernel to 1.0.1 \ No newline at end of file diff --git a/dotnet/website/template/public/main.js b/dotnet/website/template/public/main.js deleted file mode 100644 index df5fb0b834..0000000000 --- a/dotnet/website/template/public/main.js +++ /dev/null @@ -1,9 +0,0 @@ -export default { - iconLinks: [ - { - icon: 'github', - href: 'https://github.com/microsoft/autogen', - title: 'GitHub' - } - ] - } \ No newline at end of file diff --git a/dotnet/website/toc.yml b/dotnet/website/toc.yml deleted file mode 100644 index bf4ff08776..0000000000 --- a/dotnet/website/toc.yml +++ /dev/null @@ -1,20 +0,0 @@ -- name: Docs - href: articles/ - -- name: Tutorial - href: tutorial/ - -- name: API Reference - href: api/ - -- name: Release Notes - href: release_note/ - -- name: Comparison between Python AutoGen and AutoGen.Net - href: articles/function-comparison-page-between-python-AutoGen-and-autogen.net.md - -- name: Other Languages - dropdown: true - items: - - name: Python - href: https://docs.ag2.ai diff --git a/dotnet/website/tutorial/Chat-with-an-agent.md b/dotnet/website/tutorial/Chat-with-an-agent.md deleted file mode 100644 index 1fc28a2533..0000000000 --- a/dotnet/website/tutorial/Chat-with-an-agent.md +++ /dev/null @@ -1,53 +0,0 @@ -This tutorial shows how to generate a response using an @AutoGen.Core.IAgent, taking @AutoGen.OpenAI.OpenAIChatAgent as an example. - -> [!NOTE] -> AutoGen.Net provides the following agents to connect to different LLM platforms. Generating responses using these agents is similar to the example shown below.
-> - @AutoGen.OpenAI.OpenAIChatAgent -> - @AutoGen.SemanticKernel.SemanticKernelAgent -> - @AutoGen.LMStudio.LMStudioAgent -> - @AutoGen.Mistral.MistralClientAgent -> - @AutoGen.Anthropic.AnthropicClientAgent -> - @AutoGen.Ollama.OllamaAgent -> - @AutoGen.Gemini.GeminiChatAgent - -> [!NOTE] -> The complete code example can be found in [Chat_With_Agent.cs](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs) - -## Step 1: Install AutoGen - -First, install the AutoGen package using the following command: - -```bash -dotnet add package AutoGen -``` - -## Step 2: Add Using Statements - -[!code-csharp[Using Statements](../../sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs?name=Using)] - -## Step 3: Create an @AutoGen.OpenAI.OpenAIChatAgent - -> [!NOTE] -> The @AutoGen.OpenAI.Extension.OpenAIAgentExtension.RegisterMessageConnector* method registers an @AutoGen.OpenAI.OpenAIChatRequestMessageConnector middleware which converts OpenAI message types to AutoGen message types. This step is necessary when you want to use AutoGen built-in message types like @AutoGen.Core.TextMessage, @AutoGen.Core.ImageMessage, etc. -> For more information, see [Built-in-messages](../articles/Built-in-messages.md) - -[!code-csharp[Create an OpenAIChatAgent](../../sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs?name=Create_Agent)] - -## Step 4: Generate Response -To generate a response, you can use one of the overloads of the @AutoGen.Core.AgentExtension.SendAsync* method. The following code shows how to generate a response from a text message: - -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs?name=Chat_With_Agent)] - -To generate a response with chat history, you can pass the chat history to the @AutoGen.Core.AgentExtension.SendAsync* method: - -[!code-csharp[Generate Response with Chat History](../../sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs?name=Chat_With_History)] - -To generate a streaming response, use @AutoGen.Core.IStreamingAgent.GenerateStreamingReplyAsync*: - -[!code-csharp[Generate Streaming Response](../../sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs?name=Streaming_Chat)] - -## Further Reading -- [Chat with google gemini](../articles/AutoGen.Gemini/Chat-with-google-gemini.md) -- [Chat with vertex gemini](../articles/AutoGen.Gemini/Chat-with-vertex-gemini.md) -- [Chat with Ollama](../articles/AutoGen.Ollama/Chat-with-llama.md) -- [Chat with Semantic Kernel Agent](../articles/AutoGen.SemanticKernel/SemanticKernelAgent-simple-chat.md) \ No newline at end of file diff --git a/dotnet/website/tutorial/Create-agent-with-tools.md b/dotnet/website/tutorial/Create-agent-with-tools.md deleted file mode 100644 index 75f42e52a8..0000000000 --- a/dotnet/website/tutorial/Create-agent-with-tools.md +++ /dev/null @@ -1,105 +0,0 @@ -This tutorial shows how to use tools in an agent. - -## What is a tool -Tools are pre-defined functions in the user's project that an agent can invoke. An agent can use tools to perform actions such as searching the web or performing calculations, which can greatly extend its capabilities. - -> [!NOTE] -> To use tools with an agent, the backend LLM model used by the agent needs to support tool calling. Here are some of the LLM models that support tool calling as of 06/21/2024: -> - GPT-3.5-turbo with version >= 0613 -> - GPT-4 series -> - Gemini series -> - OPEN_MISTRAL_7B -> - ... -> -> This tutorial uses the latest `GPT-3.5-turbo` as an example.
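To make the idea concrete before the key concepts and steps below, here is a rough sketch of what a tool looks like in user code. It mirrors the `GetWeather` sample referenced later in this tutorial, but the body shown here is illustrative rather than copied from that sample.

```csharp
using System.Threading.Tasks;
using AutoGen.Core;

// A tool is an ordinary public instance method on a public partial class.
// Marking it with [Function] lets AutoGen.SourceGenerator emit a matching
// FunctionContract, as described in the steps below.
public partial class Tools
{
    /// <summary>
    /// Get the weather of a city.
    /// </summary>
    /// <param name="city">The city to query.</param>
    [Function]
    public async Task<string> GetWeather(string city)
    {
        // Illustrative only; a real tool would call a weather service here.
        return await Task.FromResult($"The weather in {city} is sunny.");
    }
}
```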
- -> [!NOTE] -> The complete code example can be found in [Use_Tools_With_Agent.cs](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs) - -## Key Concepts -- @AutoGen.Core.FunctionContract: The contract of a function that an agent can invoke. It contains the function name, description, parameters schema, and return type. -- @AutoGen.Core.ToolCallMessage: A message type that represents a tool call request in AutoGen.Net. -- @AutoGen.Core.ToolCallResultMessage: A message type that represents a tool call result in AutoGen.Net. -- @AutoGen.Core.ToolCallAggregateMessage: An aggregate message type that represents a tool call request and its result in a single message in AutoGen.Net. -- @AutoGen.Core.FunctionCallMiddleware: A middleware that passes the @AutoGen.Core.FunctionContract to the agent when generating a response and processes the tool call response when receiving a @AutoGen.Core.ToolCallMessage. - -> [!Tip] -> You can use AutoGen.SourceGenerator to automatically generate type-safe @AutoGen.Core.FunctionContract definitions instead of defining them manually. For more information, please check out [Create type-safe function](../articles/Create-type-safe-function-call.md). - -## Install AutoGen and AutoGen.SourceGenerator -First, install the AutoGen and AutoGen.SourceGenerator packages using the following commands: - -```bash -dotnet add package AutoGen -dotnet add package AutoGen.SourceGenerator -``` - -Also, you might need to enable XML documentation file generation by setting the `GenerateDocumentationFile` property to `true` in your project file. This allows the source generator to leverage the XML documentation of the function when generating the function definition. - -```xml -<PropertyGroup> -  <GenerateDocumentationFile>true</GenerateDocumentationFile> -</PropertyGroup> -``` - -## Add Using Statements - -[!code-csharp[Using Statements](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Using)] - -## Create agent - -Create an @AutoGen.OpenAI.OpenAIChatAgent with `GPT-3.5-turbo` as the backend LLM model. - -[!code-csharp[Create an agent with tools](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Create_Agent)] - -## Define `Tool` class and create tools -Create a `public partial` class to host the tools you want to use in AutoGen agents. The method has to be a `public` instance method and its return type must be `Task<string>`. After the methods are defined, mark them with the @AutoGen.Core.FunctionAttribute attribute. - -In the following example, we define a `GetWeather` tool that returns the weather information of a city. - -[!code-csharp[Define Tool class](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Tools)] -[!code-csharp[Create tools](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Create_tools)] - -## Tool call without auto-invoke -In this case, when receiving a @AutoGen.Core.ToolCallMessage, the agent will not automatically invoke the tool. Instead, the agent will return the original message back to the user. The user can then decide whether to invoke the tool or not. - -![single-turn tool call without auto-invoke](../images/articles/CreateAgentWithTools/single-turn-tool-call-without-auto-invoke.png) - -To implement this, you can create the @AutoGen.Core.FunctionCallMiddleware without passing the `functionMap` parameter to the constructor so that the middleware will not automatically invoke the tool once it receives a @AutoGen.Core.ToolCallMessage from its inner agent.
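As a rough sketch of that wiring (the referenced sample below is the authoritative version), the middleware can be created with only the function contracts. Here `agent` is assumed to be the @AutoGen.OpenAI.OpenAIChatAgent created in the Create agent step, and `GetWeatherFunctionContract` is the member AutoGen.SourceGenerator is expected to generate for the `GetWeather` tool; the exact generated name may differ.

```csharp
// Sketch only: expose the tool contract to the LLM but do not auto-invoke it.
var tools = new Tools();
var noInvokeMiddleware = new FunctionCallMiddleware(
    functions: new[] { tools.GetWeatherFunctionContract }); // no functionMap => tool calls are returned as-is

// RegisterMiddleware returns a new agent that wraps the original one.
var agentWithTools = agent.RegisterMiddleware(noInvokeMiddleware);

// The reply is expected to be a ToolCallMessage describing the requested call,
// which the caller can inspect and execute manually if desired.
var reply = await agentWithTools.SendAsync("What is the weather in Seattle?");
```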
- -[!code-csharp[Single-turn tool call without auto-invoke](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Create_no_invoke_middleware)] - -After creating the function call middleware, you can register it with the agent using the `RegisterMiddleware` method, which returns a new agent that can use the methods defined in the `Tool` class. - -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Single_Turn_No_Invoke)] - -## Tool call with auto-invoke -In this case, the agent will automatically invoke the tool when receiving a @AutoGen.Core.ToolCallMessage and return a @AutoGen.Core.ToolCallAggregateMessage which contains both the tool call request and the tool call result. - -![single-turn tool call with auto-invoke](../images/articles/CreateAgentWithTools/single-turn-tool-call-with-auto-invoke.png) - -To implement this, you can create the @AutoGen.Core.FunctionCallMiddleware with the `functionMap` parameter so that the middleware will automatically invoke the tool once it receives a @AutoGen.Core.ToolCallMessage from its inner agent. - -[!code-csharp[Single-turn tool call with auto-invoke](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Create_auto_invoke_middleware)] - -After creating the function call middleware, you can register it with the agent using the `RegisterMiddleware` method, which returns a new agent that can use the methods defined in the `Tool` class. - -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Single_Turn_Auto_Invoke)] - -## Send the tool call result back to the LLM to generate a further response -In some cases, you may want to send the tool call result back to the LLM to generate a further response. To do this, you can send the tool call response from the agent back to the LLM by calling the agent's `SendAsync` method. - -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=Multi_Turn_Tool_Call)] - -## Parallel tool call -Some LLM models support parallel tool calls, returning multiple tool calls in a single message. Note that @AutoGen.Core.FunctionCallMiddleware already handles parallel tool calls for you: when it receives a @AutoGen.Core.ToolCallMessage that contains multiple tool calls, it will automatically invoke all the tools in sequential order and return a @AutoGen.Core.ToolCallAggregateMessage which contains all the tool call requests and results.
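As a hedged sketch of the auto-invoke setup that parallel tool calls rely on (the referenced sample below is authoritative), the middleware is given both the contracts and a function map. The `GetTime` tool, the generated `*FunctionContract` properties, and the generated `*Wrapper` methods are assumptions for illustration; the using statements from the earlier step are assumed to be in place.

```csharp
// Sketch: provide a functionMap so the middleware executes tool calls itself,
// including several calls carried by a single ToolCallMessage.
var tools = new Tools();
var autoInvokeMiddleware = new FunctionCallMiddleware(
    functions: new[]
    {
        tools.GetWeatherFunctionContract,
        tools.GetTimeFunctionContract, // hypothetical second tool, for illustration only
    },
    functionMap: new Dictionary<string, Func<string, Task<string>>>
    {
        [nameof(Tools.GetWeather)] = tools.GetWeatherWrapper, // generated wrapper: JSON arguments -> Task<string>
        [nameof(Tools.GetTime)] = tools.GetTimeWrapper,
    });

var agentWithTools = agent.RegisterMiddleware(autoInvokeMiddleware);

// A prompt that needs both tools may come back as a single ToolCallAggregateMessage
// containing every tool call request together with its result.
var reply = await agentWithTools.SendAsync("What is the weather and the local time in Seattle?");
```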
- -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs?name=parallel_tool_call)] - -## Further Reading -- [Function call with openai](../articles/OpenAIChatAgent-use-function-call.md) -- [Function call with gemini](../articles/AutoGen.Gemini/Function-call-with-gemini.md) -- [Function call with local model](../articles/Function-call-with-ollama-and-litellm.md) -- [Use kernel plugin in other agents](../articles/AutoGen.SemanticKernel/Use-kernel-plugin-in-other-agents.md) -- [function call in mistral](../articles/MistralChatAgent-use-function-call.md) \ No newline at end of file diff --git a/dotnet/website/tutorial/Image-chat-with-agent.md b/dotnet/website/tutorial/Image-chat-with-agent.md deleted file mode 100644 index 2a871914a0..0000000000 --- a/dotnet/website/tutorial/Image-chat-with-agent.md +++ /dev/null @@ -1,50 +0,0 @@ -This tutorial shows how to perform image chat with an agent using the @AutoGen.OpenAI.OpenAIChatAgent as an example. - -> [!NOTE] -> To chat about an image with an agent, the model behind the agent needs to support image input. Here is a partial list of models that support image input: -> - gpt-4o -> - gemini-1.5 -> - llava -> - claude-3 -> - ... -> -> In this example, we are using the gpt-4o model as the backend model for the agent. - -> [!NOTE] -> The complete code example can be found in [Image_Chat_With_Agent.cs](https://github.com/ag2ai/ag2/blob/main/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs) - -## Step 1: Install AutoGen - -First, install the AutoGen package using the following command: - -```bash -dotnet add package AutoGen -``` - -## Step 2: Add Using Statements - -[!code-csharp[Using Statements](../../sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs?name=Using)] - -## Step 3: Create an @AutoGen.OpenAI.OpenAIChatAgent - -[!code-csharp[Create an OpenAIChatAgent](../../sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs?name=Create_Agent)] - -## Step 4: Prepare Image Message - -In AutoGen, you can create an image message using either @AutoGen.Core.ImageMessage or @AutoGen.Core.MultiModalMessage. The @AutoGen.Core.ImageMessage takes a single image as input, whereas the @AutoGen.Core.MultiModalMessage allows you to pass multiple modalities like text and images. - -Here is how to create an image message using @AutoGen.Core.ImageMessage: -[!code-csharp[Create Image Message](../../sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs?name=Prepare_Image_Input)] - -Here is how to create a multimodal message using @AutoGen.Core.MultiModalMessage: -[!code-csharp[Create MultiModal Message](../../sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs?name=Prepare_Multimodal_Input)] - -## Step 5: Generate Response - -To generate a response, you can use one of the overloads of the @AutoGen.Core.AgentExtension.SendAsync* method.
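Before the referenced sample, here is a rough sketch of what sending an image might look like. The image URL is a placeholder, and `agent` is assumed to be the @AutoGen.OpenAI.OpenAIChatAgent created in Step 3.

```csharp
// Sketch: send a single image to the agent...
var imageMessage = new ImageMessage(Role.User, "https://example.com/sample-image.png");
var reply = await agent.SendAsync(imageMessage);

// ...or combine text and image in one MultiModalMessage.
var multiModalMessage = new MultiModalMessage(
    Role.User,
    new IMessage[]
    {
        new TextMessage(Role.User, "What do you see in this image?"),
        imageMessage,
    });
var multiModalReply = await agent.SendAsync(multiModalMessage);
```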
The following code shows how to generate a response with an image message: - -[!code-csharp[Generate Response](../../sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs?name=Chat_With_Agent)] - -## Further Reading -- [Image chat with gemini](../articles/AutoGen.Gemini/Image-chat-with-gemini.md) -- [Image chat with llava](../articles/AutoGen.Ollama/Chat-with-llava.md) \ No newline at end of file diff --git a/dotnet/website/tutorial/Use-AutoGen.Net-agent-as-model-in-AG-Studio.md b/dotnet/website/tutorial/Use-AutoGen.Net-agent-as-model-in-AG-Studio.md deleted file mode 100644 index a47cb01f64..0000000000 --- a/dotnet/website/tutorial/Use-AutoGen.Net-agent-as-model-in-AG-Studio.md +++ /dev/null @@ -1,84 +0,0 @@ -This tutorial shows how to use an AutoGen.Net agent as a model in AG Studio. - -## Step 1. Create an empty dotnet web app and install the AutoGen and AutoGen.WebAPI packages - -```bash -dotnet new web -dotnet add package AutoGen -dotnet add package AutoGen.WebAPI -``` - -## Step 2. Replace the Program.cs with the following code - -```csharp -using AutoGen.Core; -using AutoGen.Service; - -var builder = WebApplication.CreateBuilder(args); -var app = builder.Build(); - -var helloWorldAgent = new HelloWorldAgent(); -app.UseAgentAsOpenAIChatCompletionEndpoint(helloWorldAgent); - -app.Run(); - -class HelloWorldAgent : IAgent -{ - public string Name => "HelloWorld"; - - public Task<IMessage> GenerateReplyAsync(IEnumerable<IMessage> messages, GenerateReplyOptions? options = null, CancellationToken cancellationToken = default) - { - return Task.FromResult<IMessage>(new TextMessage(Role.Assistant, "Hello World!", from: this.Name)); - } -} -``` - -## Step 3: Start the web app - -Run the following command to start the web API: - -```bash -dotnet run -``` - -The web API will listen at `http://localhost:5264/v1/chat/completion` - -![terminal](../images/articles/UseAutoGenAsModelinAGStudio/Terminal.png) - -## Step 4: In another terminal, start autogen-studio - -```bash -autogenstudio ui -``` - -## Step 5: Navigate to the AutoGen Studio UI and add the hello world agent as an OpenAI model - -### Step 5.1: Go to the model tab - -![The Model Tab](../images/articles/UseAutoGenAsModelinAGStudio/TheModelTab.png) - -### Step 5.2: Select the "OpenAI model" card - -![Open AI model Card](../images/articles/UseAutoGenAsModelinAGStudio/Step5.2OpenAIModel.png) - -### Step 5.3: Fill in the model name and URL - -The model name needs to be the same as the agent name. - -![Fill the model name and url](../images/articles/UseAutoGenAsModelinAGStudio/Step5.3ModelNameAndURL.png) - -## Step 6: Create a hello world agent that uses the hello world model - -![Create a hello world agent that uses the hello world model](../images/articles/UseAutoGenAsModelinAGStudio/Step6.png) - -![Agent Configuration](../images/articles/UseAutoGenAsModelinAGStudio/Step6b.png) - -## Final Step: Use the hello world agent in a workflow - -![Use the hello world agent in workflow](../images/articles/UseAutoGenAsModelinAGStudio/FinalStepsA.png) - -![Use the hello world agent in workflow](../images/articles/UseAutoGenAsModelinAGStudio/FinalStepsB.png) - -![Use the hello world agent in workflow](../images/articles/UseAutoGenAsModelinAGStudio/FinalStepsC.png) diff --git a/dotnet/website/tutorial/toc.yml b/dotnet/website/tutorial/toc.yml deleted file mode 100644 index 167baa70e4..0000000000 --- a/dotnet/website/tutorial/toc.yml +++ /dev/null @@ -1,11 +0,0 @@ -- name: Chat with an agent - href: Chat-with-an-agent.md - --
name: Image chat with agent - href: Image-chat-with-agent.md - -- name: Create agent with tools - href: Create-agent-with-tools.md - -- name: Use AutoGen.Net agent as model in AG Studio - href: Use-AutoGen.Net-agent-as-model-in-AG-Studio.md \ No newline at end of file diff --git a/test/website/test_process_notebooks.py b/test/website/test_process_notebooks.py index 08d31d40c2..d5b815ca67 100644 --- a/test/website/test_process_notebooks.py +++ b/test/website/test_process_notebooks.py @@ -218,7 +218,6 @@ def test_extract_example_group(self): expected = { "group": "Examples", "pages": [ - "notebooks/Examples", { "group": "Examples by Notebook", "pages": [ diff --git a/website/docs/Examples.mdx b/website/docs/Examples.mdx deleted file mode 100644 index adb3f997b5..0000000000 --- a/website/docs/Examples.mdx +++ /dev/null @@ -1,134 +0,0 @@ ---- -title: Examples by Category ---- - -## Automated Multi Agent Chat - -AutoGen offers conversable agents powered by LLM, tool or human, which can be used to perform tasks collectively via automated chat. This framework allows tool use and human participation via multi-agent conversation. -Please find documentation about this feature [here](/docs/Use-Cases/agent_chat). - -Links to notebook examples: - -### Code Generation, Execution, and Debugging - -- Automated Task Solving with Code Generation, Execution & Debugging - [View Notebook](/notebooks/agentchat_auto_feedback_from_code_execution) -- Automated Code Generation and Question Answering with Retrieval Augmented Agents - [View Notebook](/notebooks/agentchat_RetrieveChat) -- Automated Code Generation and Question Answering with [Qdrant](https://qdrant.tech/) based Retrieval Augmented Agents - [View Notebook](/notebooks/agentchat_RetrieveChat_qdrant) - -### Multi-Agent Collaboration (>3 Agents) - -- Automated Task Solving by Group Chat (with 3 group member agents and 1 manager agent) - [View Notebook](/notebooks/agentchat_groupchat) -- Automated Data Visualization by Group Chat (with 3 group member agents and 1 manager agent) - [View Notebook](/notebooks/agentchat_groupchat_vis) -- Automated Complex Task Solving by Group Chat (with 6 group member agents and 1 manager agent) - [View Notebook](/notebooks/agentchat_groupchat_research) -- Automated Task Solving with Coding & Planning Agents - [View Notebook](/notebooks/agentchat_planning) -- Automated Task Solving with transition paths specified in a graph - [View Notebook](/notebooks/agentchat_groupchat_finite_state_machine) -- Running a group chat as an inner-monolgue via the SocietyOfMindAgent - [View Notebook](/notebooks/agentchat_society_of_mind) -- Running a group chat with custom speaker selection function - [View Notebook](/notebooks/agentchat_groupchat_customized) - -### Sequential Multi-Agent Chats - -- Solving Multiple Tasks in a Sequence of Chats Initiated by a Single Agent - [View Notebook](/notebooks/agentchat_multi_task_chats) -- Async-solving Multiple Tasks in a Sequence of Chats Initiated by a Single Agent - [View Notebook](/notebooks/agentchat_multi_task_async_chats) -- Solving Multiple Tasks in a Sequence of Chats Initiated by Different Agents - [View Notebook](/notebooks/agentchats_sequential_chats) - -### Nested Chats - -- Solving Complex Tasks with Nested Chats - [View Notebook](/notebooks/agentchat_nestedchat) -- Solving Complex Tasks with A Sequence of Nested Chats - [View Notebook](/notebooks/agentchat_nested_sequential_chats) -- OptiGuide for Solving a Supply Chain Optimization Problem with Nested Chats with a Coding Agent and a 
Safeguard Agent - [View Notebook](/notebooks/agentchat_nestedchat_optiguide) -- Conversational Chess with Nested Chats and Tool Use - [View Notebook](/notebooks/agentchat_nested_chats_chess) - -### Swarms -- Orchestrating agents in a Swarm - [View Notebook](/notebooks/agentchat_swarm) -- Orchestrating agents in a Swarm (Enhanced) - [View Notebook](/notebooks/agentchat_swarm_enhanced) - -### Applications - -- Automated Continual Learning from New Data - [View Notebook](/notebooks/agentchat_stream) -{/* - [OptiGuide](https://github.com/microsoft/optiguide) - Coding, Tool Using, Safeguarding & Question Answering for Supply Chain Optimization */} -- [AutoAnny](https://github.com/ag2ai/build-with-ag2/tree/main/samples/apps/auto-anny) - A Discord bot built using AutoGen - -### RAG - -- GraphRAG agent using FalkorDB (feat. swarms and Google Maps API) - [View Notebook](/notebooks/agentchat_swarm_graphrag_trip_planner) - -### Tool Use - -- **Web Search**: Solve Tasks Requiring Web Info - [View Notebook](/notebooks/agentchat_web_info) -- Use Provided Tools as Functions - [View Notebook](/notebooks/agentchat_function_call_currency_calculator) -- Use Tools via Sync and Async Function Calling - [View Notebook](/notebooks/agentchat_function_call_async) -- Task Solving with Langchain Provided Tools as Functions - [View Notebook](/notebooks/agentchat_langchain) -- **RAG**: Group Chat with Retrieval Augmented Generation (with 5 group member agents and 1 manager agent) - [View Notebook](/notebooks/agentchat_groupchat_RAG) -- Function Inception: Enable AutoGen agents to update/remove functions during conversations. - [View Notebook](/notebooks/agentchat_inception_function) -- Agent Chat with Whisper - [View Notebook](/notebooks/agentchat_video_transcript_translate_with_whisper) -- Constrained Responses via Guidance - [View Notebook](/notebooks/agentchat_guidance) -- Browse the Web with Agents - [View Notebook](/notebooks/agentchat_surfer) -- **SQL**: Natural Language Text to SQL Query using the [Spider](https://yale-lily.github.io/spider) Text-to-SQL Benchmark - [View Notebook](/notebooks/agentchat_sql_spider) -- **Web Scraping**: Web Scraping with Apify - [View Notebook](/notebooks/agentchat_webscraping_with_apify) -- **Write a software app, task by task, with specially designed functions.** - [View Notebook](/notebooks/agentchat_function_call_code_writing). 
- -### Human Involvement - -- Simple example in ChatGPT style [View example](https://github.com/ag2ai/build-with-ag2/blob/main/samples/simple_chat.py) -- Auto Code Generation, Execution, Debugging and **Human Feedback** - [View Notebook](/notebooks/agentchat_human_feedback) -- Automated Task Solving with GPT-4 + **Multiple Human Users** - [View Notebook](/notebooks/agentchat_two_users) -- Agent Chat with **Async Human Inputs** - [View Notebook](/notebooks/async_human_input) - -### Agent Teaching and Learning - -- Teach Agents New Skills & Reuse via Automated Chat - [View Notebook](/notebooks/agentchat_teaching) -- Teach Agents New Facts, User Preferences and Skills Beyond Coding - [View Notebook](/notebooks/agentchat_teachability) -- Teach OpenAI Assistants Through GPTAssistantAgent - [View Notebook](/notebooks/agentchat_teachable_oai_assistants) -- Agent Optimizer: Train Agents in an Agentic Way - [View Notebook](/notebooks/agentchat_agentoptimizer) - -### Multi-Agent Chat with OpenAI Assistants in the loop - -- Hello-World Chat with OpenAI Assistant in AutoGen - [View Notebook](/notebooks/agentchat_oai_assistant_twoagents_basic) -- Chat with OpenAI Assistant using Function Call - [View Notebook](/notebooks/agentchat_oai_assistant_function_call) -- Chat with OpenAI Assistant with Code Interpreter - [View Notebook](/notebooks/agentchat_oai_code_interpreter) -- Chat with OpenAI Assistant with Retrieval Augmentation - [View Notebook](/notebooks/agentchat_oai_assistant_retrieval) -- OpenAI Assistant in a Group Chat - [View Notebook](/notebooks/agentchat_oai_assistant_groupchat) -- GPTAssistantAgent based Multi-Agent Tool Use - [View Notebook](/notebooks/gpt_assistant_agent_function_call) - -### Non-OpenAI Models -- Conversational Chess using non-OpenAI Models - [View Notebook](/notebooks/agentchat_nested_chats_chess_altmodels) - -### Multimodal Agent - -- Multimodal Agent Chat with DALLE and GPT-4V - [View Notebook](/notebooks/agentchat_dalle_and_gpt4v) -- Multimodal Agent Chat with Llava - [View Notebook](/notebooks/agentchat_lmm_llava) -- Multimodal Agent Chat with GPT-4V - [View Notebook](/notebooks/agentchat_lmm_gpt-4v) - -### Long Context Handling - -{/* - Conversations with Chat History Compression Enabled - [View Notebook](https://github.com/ag2ai/ag2/blob/main/notebook/agentchat_compression.ipynb) */} -- Long Context Handling as a Capability - [View Notebook](/notebooks/agentchat_transform_messages) - -### Evaluation and Assessment - -- AgentEval: A Multi-Agent System for Assessing the Utility of LLM-powered Applications - [View Notebook](/notebooks/agenteval_cq_math) - -### Automatic Agent Building - -- Automatically Build Multi-agent System with AgentBuilder - [View Notebook](/notebooks/autobuild_basic) -- Automatically Build Multi-agent System from Agent Library - [View Notebook](/notebooks/autobuild_agent_library) - -### Observability -- Track LLM calls, tool usage, actions and errors using AgentOps - [View Notebook](/notebooks/agentchat_agentops) -- Cost Calculation - [View Notebook](/notebooks/agentchat_cost_token_tracking) - -## Enhanced Inferences - -### Utilities - -- API Unification - [View Documentation with Code Example](https://docs.ag2.ai/docs/Use-Cases/enhanced_inference#api-unification) -- Utility Functions to Help Manage API Configurations Effectively - [View Notebook](/docs/topics/llm_configuration) - -### Inference Hyperparameters Tuning - -AutoGen offers a cost-effective hyperparameter optimization technique [EcoOptiGen](https://arxiv.org/abs/2303.04673) for tuning
Large Language Models. The research study finds that tuning hyperparameters can significantly improve the utility of them. -Please find documentation about this feature [here](/docs/Use-Cases/enhanced_inference). - -Links to notebook examples: -* [Optimize for Code Generation](https://github.com/ag2ai/ag2/blob/main/notebook/oai_completion.ipynb) | [Open in colab](https://colab.research.google.com/github/ag2ai/ag2/blob/main/notebook/oai_completion.ipynb) -* [Optimize for Math](https://github.com/ag2ai/ag2/blob/main/notebook/oai_chatgpt_gpt4.ipynb) | [Open in colab](https://colab.research.google.com/github/ag2ai/ag2/blob/main/notebook/oai_chatgpt_gpt4.ipynb) diff --git a/website/docs/FAQ.mdx b/website/docs/FAQ.mdx index 8e9a531e6d..ff77084a92 100644 --- a/website/docs/FAQ.mdx +++ b/website/docs/FAQ.mdx @@ -3,15 +3,20 @@ title: Frequently Asked Questions sidebarTitle: FAQ --- -## Install the correct package - `autogen` +## Install the correct package -The name of Autogen package at PyPI is `autogen`: - -``` +The valid aliases of AG2 package at PyPI are `ag2`, `autogen`, and `pyautogen`. +Any one of the following three lines works: +```bash +pip install ag2 pip install autogen +pip install pyautogen ``` -Typical errors that you might face when using the wrong package are `AttributeError: module 'autogen' has no attribute 'Agent'`, `AttributeError: module 'autogen' has no attribute 'config_list_from_json'` etc. +After installation of any of them, you can import `autogen` using the same import statement: +```python +import autogen +``` ## Set your API endpoints @@ -33,8 +38,8 @@ In version >=1, OpenAI renamed their `api_base` parameter to `base_url`. So for Yes. You currently have two options: -- Autogen can work with any API endpoint which complies with OpenAI-compatible RESTful APIs - e.g. serving local LLM via FastChat or LM Studio. Please check [here](/blog/2023-07-14-Local-LLMs) for an example. -- You can supply your own custom model implementation and use it with Autogen. Please check [here](/blog/2024-01-26-Custom-Models) for more information. +- `autogen` can work with any API endpoint which complies with OpenAI-compatible RESTful APIs - e.g. serving local LLM via FastChat or LM Studio. Please check [here](/blog/2023-07-14-Local-LLMs) for an example. +- You can supply your own custom model implementation and use it with `autogen`. Please check [here](/blog/2024-01-26-Custom-Models) for more information. ## Handle Rate Limit Error and Timeout Error @@ -104,12 +109,12 @@ By default it runs code in a docker container. If you want to run code locally for each code-execution agent, or set `AUTOGEN_USE_DOCKER` to `False` as an environment variable. -You can also develop your AutoGen application in a docker container. +You can also develop your AG2 application in a docker container. For example, when developing in [GitHub codespace](https://codespaces.new/ag2ai/ag2?quickstart=1), -AutoGen runs in a docker container. +AG2 runs in a docker container. If you are not developing in GitHub Codespaces, follow instructions [here](/docs/installation/Docker#step-1-install-docker) -to install and run AutoGen in docker. +to install and run AG2 in docker. 
## Agents keep thanking each other when using `gpt-3.5-turbo` @@ -156,9 +161,7 @@ Explanation: Per [this gist](https://gist.github.com/defulmere/8b9695e415a442710 ## How to register a reply function -(from [issue #478](https://github.com/microsoft/autogen/issues/478)) - -See here /docs/reference/agentchat/conversable_agent/#register_reply +See [here](/docs/reference/agentchat/conversable_agent/#register_reply) For example, you can register a reply function that gets called when `generate_reply` is called for an agent. @@ -187,11 +190,11 @@ In the above, we register a `print_messages` function that is called each time t ## How to get last message ? -Refer to /docs/reference/agentchat/conversable_agent/#last_message +Refer to [here](/docs/reference/agentchat/conversable_agent/#last_message) ## How to get each agent message ? -Please refer to /docs/reference/agentchat/conversable_agent#chat_messages +Please refer to [here](/docs/reference/agentchat/conversable_agent#chat_messages) ## When using autogen docker, is it always necessary to reinstall modules? @@ -228,7 +231,7 @@ You can also disable the cache. See [here](/docs/topics/llm-caching#disabling-ca ## Agents are throwing due to docker not running, how can I resolve this? -If running AutoGen locally the default for agents who execute code is for them to try and perform code execution within a docker container. If docker is not running, this will cause the agent to throw an error. To resolve this you have some options. +If running AG2 locally the default for agents who execute code is for them to try and perform code execution within a docker container. If docker is not running, this will cause the agent to throw an error. To resolve this you have some options. ### If you want to disable code execution entirely @@ -260,11 +263,11 @@ user_proxy = autogen.UserProxyAgent( ### What should I do if I get the error "TypeError: Assistants.create() got an unexpected keyword argument 'file_ids'"? -This error typically occurs when using Autogen version earlier than 0.2.27 in combination with OpenAI library version 1.21 or later. The issue arises because the older version of Autogen does not support the file_ids parameter used by newer versions of the OpenAI API. -To resolve this issue, you need to upgrade your Autogen library to version 0.2.27 or higher that ensures compatibility between Autogen and the OpenAI library. +This error typically occurs when using `pyautogen` version earlier than 0.2.27 in combination with OpenAI library version 1.21 or later. The issue arises because the older version of `pyautogen` does not support the file_ids parameter used by newer versions of the OpenAI API. +To resolve this issue, you need to upgrade your `pyautogen` package to version 0.2.27 or higher that ensures compatibility between AG2 and the OpenAI library. ```python -pip install --upgrade autogen +pip install --upgrade pyautogen ``` ## None of the devcontainers are building due to "Hash sum mismatch", what should I do? diff --git a/website/docs/Gallery.mdx b/website/docs/Gallery.mdx index 7173812a6f..ca61fbe83b 100644 --- a/website/docs/Gallery.mdx +++ b/website/docs/Gallery.mdx @@ -7,10 +7,10 @@ import { GalleryPage } from "/snippets/components/GalleryPage.mdx"; import { galleryItems } from "/snippets/data/GalleryItems.mdx"; import { ClientSideComponent } from "/snippets/components/ClientSideComponent.mdx"; -This page contains a list of demos that use AutoGen in various applications from the community. 
+This page contains a list of demos that use AG2 in various applications from the community. **Contribution guide:** -Built something interesting with AutoGen? Submit a PR to add it to the list! See the [Contribution Guide below](#contributing) for more details. +Built something interesting with AG2? Submit a PR to add it to the list! See the [Contribution Guide below](#contributing) for more details. @@ -26,9 +26,9 @@ Thank you for your interest in contributing! To add your demo to the gallery, pl ``` { - "title": "AutoGen Playground", // The title of your demo + "title": "AG2 Playground", // The title of your demo "link": "https://huggingface.co/spaces/thinkall/AutoGen_Playground", // URL to your demo - "description": "A space to explore the capabilities of AutoGen.", // A brief description + "description": "A space to explore the capabilities of AG2.", // A brief description "image": "default.png", // Filename of the image present in the `static/img/gallery` directory or URL of the hosted image "tags": ["ui"] // Tags to categorize your demo } @@ -44,9 +44,9 @@ Thank you for your interest in contributing! To add your demo to the gallery, pl | | If the image is hosted online, ensure the URL is correctly added to the entry. | | `tags` | Add up to two tags that best describe your demo for clarity. | | | Choose from the existing tags: | - | | - `app`: Using Autogen for specific applications. | - | | - `extension`: Enhancing AutoGen beyond the features in the current version. | - | | - `ui`: Building a user interface for AutoGen. | - | | - `tool`: Strengthening AutoGen Agents with external tools. | + | | - `app`: Using AG2 for specific applications. | + | | - `extension`: Enhancing AG2 beyond the features in the current version. | + | | - `ui`: Building a user interface for AG2. | + | | - `tool`: Strengthening AG2 Agents with external tools. | | | - `groupchat`: Solving complex tasks with a group of Agents. | | | Or propose new tags if the existing ones do not describe your demo. Ensure they are descriptive and concise. 
| diff --git a/website/process_notebooks.py b/website/process_notebooks.py index 2868ba7ce0..1a371855a9 100755 --- a/website/process_notebooks.py +++ b/website/process_notebooks.py @@ -592,7 +592,7 @@ def f() -> None: def copy_examples_mdx_files(website_dir: str) -> None: # The mdx files to copy to the notebooks directory - example_section_mdx_files = ["Examples", "Gallery", "Notebooks"] + example_section_mdx_files = ["Gallery", "Notebooks"] # Create notebooks directory if it doesn't exist website_dir = Path(website_dir) @@ -669,7 +669,7 @@ def extract_example_group(metadata_path): ], } - example_group = {"group": "Examples", "pages": ["notebooks/Examples", notebooks_entry, "notebooks/Gallery"]} + example_group = {"group": "Examples", "pages": [notebooks_entry, "notebooks/Gallery"]} return example_group diff --git a/website/snippets/data/NotebooksMetadata.mdx b/website/snippets/data/NotebooksMetadata.mdx index 812b7e9d13..6ace92fb1d 100644 --- a/website/snippets/data/NotebooksMetadata.mdx +++ b/website/snippets/data/NotebooksMetadata.mdx @@ -624,7 +624,7 @@ export const notebooksMetadata = [ }, { "title": "RealtimeAgent in a Swarm Orchestration", - "link": "/notebooks/agentchat_realtime_swarm", + "link": "/notebooks/agentchat_realtime_swarm_websocket", "description": "Swarm Ochestration", "image": null, "tags": [ @@ -632,7 +632,7 @@ export const notebooksMetadata = [ "group chat", "swarm" ], - "source": "/notebook/agentchat_realtime_swarm.ipynb" + "source": "/notebook/agentchat_realtime_swarm_websocket.ipynb" }, { "title": "RealtimeAgent with local websocket connection",