deploy.stack/mcp_server_go/config.toml

# MCP Server Go configuration file

# Basic server configuration
[server]
listen_addr = "0.0.0.0:8080"
read_timeout = 30 # Read timeout (seconds)
write_timeout = 30 # Write timeout (seconds)
max_header_bytes = 1048576 # 1MB

# OpenAI API configuration
[openai]
api_key = "your_api_key_here" # OpenAI API key
base_url = "https://api.openai.com/v1" # API base URL
model = "gpt-3.5-turbo" # Model to use
temperature = 0.7 # Sampling randomness of generated output
max_tokens = 1000 # Maximum number of tokens to generate
request_timeout = 60 # Request timeout (seconds)

# Logging configuration
[logging]
level = "info" # Log level: debug, info, warn, error
format = "text" # Log format: text, json
output_path = "logs/mcp_server.log" # Log file path
max_size = 100 # Maximum size of a single log file (MB)
max_age = 7 # Number of days to retain log files
max_backups = 5 # Maximum number of backup files
compress = false # Whether to compress rotated log files

# Security configuration
[security]
allowed_origins = ["*"]
allowed_methods = ["GET", "POST", "OPTIONS"]
allowed_headers = ["Content-Type", "Authorization"]
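
For reference, below is a minimal sketch of how a file like this might be loaded on the Go side, assuming the server uses the github.com/BurntSushi/toml package. The Config struct and its field names are illustrative assumptions, not the server's actual types; only the TOML keys come from the file above.

package main

import (
	"log"

	"github.com/BurntSushi/toml"
)

// Config mirrors config.toml. The struct and field names here are
// hypothetical; only the toml tags are taken from the file above.
type Config struct {
	Server struct {
		ListenAddr     string `toml:"listen_addr"`
		ReadTimeout    int    `toml:"read_timeout"`
		WriteTimeout   int    `toml:"write_timeout"`
		MaxHeaderBytes int    `toml:"max_header_bytes"`
	} `toml:"server"`
	OpenAI struct {
		APIKey         string  `toml:"api_key"`
		BaseURL        string  `toml:"base_url"`
		Model          string  `toml:"model"`
		Temperature    float64 `toml:"temperature"`
		MaxTokens      int     `toml:"max_tokens"`
		RequestTimeout int     `toml:"request_timeout"`
	} `toml:"openai"`
	Logging struct {
		Level      string `toml:"level"`
		Format     string `toml:"format"`
		OutputPath string `toml:"output_path"`
		MaxSize    int    `toml:"max_size"`
		MaxAge     int    `toml:"max_age"`
		MaxBackups int    `toml:"max_backups"`
		Compress   bool   `toml:"compress"`
	} `toml:"logging"`
	Security struct {
		AllowedOrigins []string `toml:"allowed_origins"`
		AllowedMethods []string `toml:"allowed_methods"`
		AllowedHeaders []string `toml:"allowed_headers"`
	} `toml:"security"`
}

func main() {
	var cfg Config
	// DecodeFile parses the TOML file into cfg; keys absent from the
	// struct are ignored.
	if _, err := toml.DecodeFile("deploy.stack/mcp_server_go/config.toml", &cfg); err != nil {
		log.Fatalf("load config: %v", err)
	}
	log.Printf("listening on %s, model %s", cfg.Server.ListenAddr, cfg.OpenAI.Model)
}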