---
# base_agent/config.yaml
# ════════════════════════════════════════════════════════════════
# config/config.yaml
# Agent system — global configuration
# ════════════════════════════════════════════════════════════════

# ── LLM model configuration ─────────────────────────────────────
llm:
  provider: "openai"       # Provider: openai | anthropic | ollama | local
  model_name: "gpt-4o"     # Model name
  # SECURITY: never commit a real key to VCS. Leave empty here and set the
  # environment variable LLM_API_KEY instead (env var takes priority).
  api_key: ""
  api_base_url: "https://openapi.monica.im/v1"  # Custom API endpoint (OpenAI-compatible proxies)
  max_tokens: 4096         # Maximum output tokens
  temperature: 0.7         # Sampling temperature, 0.0–1.0
  timeout: 60              # Request timeout (seconds)
  max_retries: 3           # Automatic retries on failure
  # OpenAI-specific
  function_calling: true   # Enable function calling (core of tool planning)
  stream: false            # Enable streaming output
  # Ollama / local-model specific
  model_path: ""           # Local model path, e.g. /models/llama3
  ollama_host: "http://localhost:11434"
# ── MCP server configuration ────────────────────────────────────
mcp:
  server_name: "DemoMCPServer"
  transport: "stdio"
  host: "localhost"
  port: 3000
  # Tools exposed by the server; names must match entries under `tools:`
  enabled_tools:
    - calculator
    - web_search
    - file_reader
    - code_executor
    - static_analyzer
    - ssh_docker
# ── Tool configuration ──────────────────────────────────────────
tools:
  web_search:
    max_results: 5
    timeout: 10              # Seconds
    api_key: ""              # Set via env/secret store; keep empty in VCS
    engine: "mock"
  file_reader:
    allowed_root: "./workspace"
    max_file_size_kb: 512
  code_executor:
    timeout: 5               # Seconds
    sandbox: true
  calculator:
    precision: 10

  # ── C/C++ static analysis ──────────────────────────────────
  static_analyzer:
    default_tool: "cppcheck"  # cppcheck | clang-tidy | infer
    default_std: "c++17"      # c89 | c99 | c11 | c++11 | c++14 | c++17 | c++20
    timeout: 120              # Analysis timeout (seconds)
    jobs: 4                   # Parallel threads (cppcheck -j)
    output_format: "summary"  # summary | json | full
    max_issues: 500           # Maximum number of issues returned
    # Whitelist of directories allowed for analysis; empty list = unrestricted
    allowed_roots: []
    # Extra default arguments per tool
    tool_extra_args:
      cppcheck: "--suppress=missingIncludeSystem --suppress=unmatchedSuppression"
      clang-tidy: "--checks=*,-fuchsia-*,-google-*,-zircon-*"
      infer: ""

  # ── SSH Docker deployment ───────────────────────────────────
  ssh_docker:
    default_ssh_port: 22
    default_username: "root"
    connect_timeout: 30       # SSH connection timeout (seconds)
    cmd_timeout: 120          # Per-command execution timeout (seconds)
    deploy_timeout: 300       # Image pull/deploy timeout (seconds)
    default_restart_policy: "unless-stopped"
    default_tail_lines: 100
    # Security: whitelist of servers that may be operated on; empty = unrestricted
    allowed_hosts: []
    # Security: image-name prefixes that are forbidden
    blocked_images: []
    # Whether --privileged mode is allowed
    allow_privileged: false
    # Known server presets (optional; avoids passing credentials on each call)
    servers: {}
    # Example:
    # servers:
    #   prod:
    #     host: "192.168.1.100"
    #     port: 22
    #     username: "deploy"
    #     key_path: "/home/ci/.ssh/id_rsa"
    #   staging:
    #     host: "192.168.1.200"
    #     port: 22
    #     username: "ubuntu"
    #     password: ""  # Leave empty to read env var SSH_STAGING_PASSWORD
# ── Memory configuration ────────────────────────────────────────
memory:
  max_history: 20            # Conversation turns kept in short-term memory
  enable_long_term: false
  vector_db_url: ""          # Required only when enable_long_term is true — TODO confirm
# ── Logging configuration ───────────────────────────────────────
logging:
  level: "DEBUG"             # NOTE(review): consider INFO for production
  enable_file: true
  log_dir: "./logs"
  log_file: "agent.log"
# ── Agent behavior configuration ────────────────────────────────
agent:
  max_chain_steps: 10
  enable_multi_step: true
  session_timeout: 3600      # Seconds
  fallback_to_rules: true    # Fall back to the rule engine when the API call fails