"""
llm/provider_factory.py
Provider 工厂根据 settings.llm.provider 自动实例化对应 Provider
"""
from config.settings import LLMConfig, settings
from llm.providers.base_provider import BaseProvider
from utils.logger import get_logger
_logger = get_logger("LLM")
def create_provider(cfg: LLMConfig | None = None) -> BaseProvider:
"""
工厂函数根据配置创建对应的 LLM Provider
Args:
cfg: LLMConfig 实例None 时从全局 settings 读取
Returns:
BaseProvider 子类实例
支持的 provider:
- "openai" OpenAIProvider含兼容 OpenAI 协议的代理
- "anthropic" AnthropicProvider预留
- "ollama" OllamaProvider预留
Raises:
ValueError: provider 名称不支持时
"""
cfg = cfg or settings.llm
_logger.info(f"🏭 Provider 工厂: 创建 [{cfg.provider}] Provider")
match cfg.provider.lower():
case "openai":
from llm.providers.openai_provider import OpenAIProvider
return OpenAIProvider(cfg)
case "anthropic":
# 预留Anthropic Provider
# from llm.providers.anthropic_provider import AnthropicProvider
# return AnthropicProvider(cfg)
_logger.warning("⚠️ Anthropic Provider 尚未实现,回退到 OpenAI")
from llm.providers.openai_provider import OpenAIProvider
return OpenAIProvider(cfg)
case "ollama":
# 预留Ollama 本地模型 Provider
# from llm.providers.ollama_provider import OllamaProvider
# return OllamaProvider(cfg)
_logger.warning("⚠️ Ollama Provider 尚未实现,回退到 OpenAI")
from llm.providers.openai_provider import OpenAIProvider
return OpenAIProvider(cfg)
case _:
raise ValueError(
f"不支持的 provider: '{cfg.provider}'"
f"可选值: openai / anthropic / ollama"
)