"""llm/provider_factory.py

Provider factory: instantiate the Provider implementation matching
``settings.llm.provider``.
"""

from config.settings import LLMConfig, settings
from llm.providers.base_provider import BaseProvider
from utils.logger import get_logger

# Module-level logger shared by the factory function below.
_logger = get_logger("LLM")
def create_provider(cfg: LLMConfig | None = None) -> BaseProvider:
|
||
"""
|
||
工厂函数:根据配置创建对应的 LLM Provider
|
||
|
||
Args:
|
||
cfg: LLMConfig 实例,None 时从全局 settings 读取
|
||
|
||
Returns:
|
||
BaseProvider 子类实例
|
||
|
||
支持的 provider:
|
||
- "openai" → OpenAIProvider(含兼容 OpenAI 协议的代理)
|
||
- "anthropic" → AnthropicProvider(预留)
|
||
- "ollama" → OllamaProvider(预留)
|
||
|
||
Raises:
|
||
ValueError: provider 名称不支持时
|
||
"""
|
||
cfg = cfg or settings.llm
|
||
_logger.info(f"🏭 Provider 工厂: 创建 [{cfg.provider}] Provider")
|
||
|
||
match cfg.provider.lower():
|
||
|
||
case "openai":
|
||
from llm.providers.openai_provider import OpenAIProvider
|
||
return OpenAIProvider(cfg)
|
||
|
||
case "anthropic":
|
||
# 预留:Anthropic Provider
|
||
# from llm.providers.anthropic_provider import AnthropicProvider
|
||
# return AnthropicProvider(cfg)
|
||
_logger.warning("⚠️ Anthropic Provider 尚未实现,回退到 OpenAI")
|
||
from llm.providers.openai_provider import OpenAIProvider
|
||
return OpenAIProvider(cfg)
|
||
|
||
case "ollama":
|
||
# 预留:Ollama 本地模型 Provider
|
||
# from llm.providers.ollama_provider import OllamaProvider
|
||
# return OllamaProvider(cfg)
|
||
_logger.warning("⚠️ Ollama Provider 尚未实现,回退到 OpenAI")
|
||
from llm.providers.openai_provider import OpenAIProvider
|
||
return OpenAIProvider(cfg)
|
||
|
||
case _:
|
||
raise ValueError(
|
||
f"不支持的 provider: '{cfg.provider}',"
|
||
f"可选值: openai / anthropic / ollama"
|
||
) |