Fix OllamaChatCompletionClient load_component() error by adding to WELL_KNOWN_PROVIDERS (#7030)

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: ekzhu <320302+ekzhu@users.noreply.github.com>
Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
This commit is contained in:
Copilot
2025-09-16 02:30:27 -07:00
committed by GitHub
parent e0e39e475b
commit c469fc0d8c
2 changed files with 44 additions and 0 deletions

View File

@@ -49,6 +49,7 @@ WELL_KNOWN_PROVIDERS = {
"AzureOpenAIChatCompletionClient": "autogen_ext.models.openai.AzureOpenAIChatCompletionClient",
"openai_chat_completion_client": "autogen_ext.models.openai.OpenAIChatCompletionClient",
"OpenAIChatCompletionClient": "autogen_ext.models.openai.OpenAIChatCompletionClient",
"OllamaChatCompletionClient": "autogen_ext.models.ollama.OllamaChatCompletionClient",
}

View File

@@ -1313,3 +1313,46 @@ async def test_tool_choice_default_behavior(monkeypatch: pytest.MonkeyPatch) ->
assert len(create_result.content) > 0
assert isinstance(create_result.content[0], FunctionCall)
assert create_result.content[0].name == add_tool.name
def test_ollama_load_component() -> None:
    """Test that OllamaChatCompletionClient can be loaded via ChatCompletionClient.load_component().

    Regression test for the issue where "OllamaChatCompletionClient" was missing
    from WELL_KNOWN_PROVIDERS, causing load_component() to fail on the short
    provider name.
    """
    from autogen_core.models import ChatCompletionClient

    # The exact configuration reported in the issue: short provider name,
    # no fully-qualified module path.
    config = {
        "provider": "OllamaChatCompletionClient",
        "config": {
            "model": "qwen3",
            "host": "http://1.2.3.4:30130",
        },
    }

    # This should not raise an error anymore now that the provider alias is registered.
    client = ChatCompletionClient.load_component(config)

    # Verify we got the right type of client.
    assert isinstance(client, OllamaChatCompletionClient)
    assert client._model_name == "qwen3"  # type: ignore[reportPrivateUsage]

    # Verify the config was applied correctly via the public accessor.
    # (get_create_args() is public API — no type-ignore needed here.)
    create_args = client.get_create_args()
    assert create_args["model"] == "qwen3"
def test_ollama_load_component_via_class() -> None:
"""Test that OllamaChatCompletionClient can be loaded via the class directly."""
config = {
"provider": "OllamaChatCompletionClient",
"config": {
"model": "llama3.2",
"host": "http://localhost:11434",
},
}
# Load via the specific class
client = OllamaChatCompletionClient.load_component(config)
# Verify we got the right type and configuration
assert isinstance(client, OllamaChatCompletionClient)
assert client._model_name == "llama3.2" # type: ignore[reportPrivateUsage]