feat: 重构日志处理和错误输出格式

refactor: 重构日志处理和错误输出格式
fix: 修正配置加载路径和温度参数类型
docs: 更新结构体字段注释格式
This commit is contained in:
cnphpbb
2025-09-11 10:34:04 +08:00
parent 4b81647da1
commit 51f4b3c8eb
4 changed files with 95 additions and 142 deletions

View File

@@ -8,6 +8,7 @@ import (
"log/slog"
"net/http"
"os"
"os/signal"
"path/filepath"
"strings"
"syscall"
@@ -25,42 +26,69 @@ import (
// Config 结构体用于存储配置
type Config struct {
Server ServerConfig `mapstructure:"server"` OpenAI OpenAIConfig `mapstructure:"openai"` Logging LoggingConfig `mapstructure:"logging"` Security SecurityConfig `mapstructure:"security"`
Server ServerConfig `mapstructure:"server"`
OpenAI OpenAIConfig `mapstructure:"openai"`
Logging LoggingConfig `mapstructure:"logging"`
Security SecurityConfig `mapstructure:"security"`
}
// ServerConfig holds HTTP server settings.
type ServerConfig struct {
	ListenAddr     string `mapstructure:"listen_addr"`      // address to bind, e.g. ":8080"
	ReadTimeout    int    `mapstructure:"read_timeout"`     // read timeout; presumably seconds — TODO confirm unit at use site
	WriteTimeout   int    `mapstructure:"write_timeout"`    // write timeout; presumably seconds — TODO confirm unit at use site
	MaxHeaderBytes int    `mapstructure:"max_header_bytes"` // max size of request headers in bytes
}
// OpenAIConfig holds OpenAI API client settings.
type OpenAIConfig struct {
	APIKey         string  `mapstructure:"api_key"`         // API key for authentication
	BaseURL        string  `mapstructure:"base_url"`        // API base URL (allows proxies/compatible endpoints)
	Model          string  `mapstructure:"model"`           // model identifier to request
	Temperature    float64 `mapstructure:"temperature"`     // sampling temperature; converted to float32 at the API call site
	MaxTokens      int     `mapstructure:"max_tokens"`      // max tokens per completion
	RequestTimeout int     `mapstructure:"request_timeout"` // request timeout; presumably seconds — TODO confirm unit at use site
}
// LoggingConfig holds log output settings (level, format, and file rotation).
type LoggingConfig struct {
	Level      string `mapstructure:"level"`       // minimum level: e.g. "debug", "info", ...
	Format     string `mapstructure:"format"`      // "json" selects the JSON handler; anything else is text
	OutputPath string `mapstructure:"output_path"` // log file path
	MaxSize    int    `mapstructure:"max_size"`    // rotation: max file size — presumably MB; verify against rotator
	MaxAge     int    `mapstructure:"max_age"`     // rotation: max age — presumably days; verify against rotator
	MaxBackups int    `mapstructure:"max_backups"` // rotation: number of old files to keep
	Compress   bool   `mapstructure:"compress"`    // rotation: compress rotated files
}
// SecurityConfig holds CORS allow-lists.
type SecurityConfig struct {
	AllowedOrigins []string `mapstructure:"allowed_origins"` // permitted Origin values
	AllowedMethods []string `mapstructure:"allowed_methods"` // permitted HTTP methods
	AllowedHeaders []string `mapstructure:"allowed_headers"` // permitted request headers
}
// MCPRequest is the JSON body accepted by the submit endpoint.
type MCPRequest struct {
	Data      interface{}       `json:"data"`               // arbitrary payload to analyze
	Type      string            `json:"type"`               // request type discriminator
	Metadata  map[string]string `json:"metadata,omitempty"` // optional caller-supplied key/value context
	Timestamp int64             `json:"timestamp"`          // client timestamp; presumably Unix seconds — TODO confirm
}
// MCPResponse is the JSON body returned by the submit endpoint.
type MCPResponse struct {
	Success   bool        `json:"success"`              // whether the request was processed successfully
	Message   string      `json:"message,omitempty"`    // optional human-readable status message
	Data      interface{} `json:"data,omitempty"`       // echo/derived payload, if any
	AIResult  interface{} `json:"ai_result,omitempty"`  // result returned by the OpenAI analysis, if any
	RequestID string      `json:"request_id,omitempty"` // correlation ID for tracing/logging
	Timestamp int64       `json:"timestamp"`            // server timestamp; presumably Unix seconds — TODO confirm
}
var (
config Config
logger *slog.Logger
config Config
logger *slog.Logger
openaiClient *openai.Client
// 速率限制器
limiter = rate.NewLimiter(rate.Limit(10), 20)
@@ -69,7 +97,7 @@ var (
prometheus.CounterOpts{
Name: "mcp_server_requests_total",
Help: "Total number of MCP server requests",
},
},
[]string{"endpoint", "status"},
)
requestDuration = prometheus.NewHistogramVec(
@@ -77,7 +105,7 @@ var (
Name: "mcp_server_request_duration_seconds",
Help: "Duration of MCP server requests in seconds",
Buckets: prometheus.DefBuckets,
},
},
[]string{"endpoint"},
)
)
@@ -95,6 +123,7 @@ func loadConfig() error {
viper.AddConfigPath(".")
viper.AddConfigPath("/etc/mcp_server/")
viper.AddConfigPath("$HOME/.mcp_server/")
viper.AddConfigPath("$HOME/.config/mcp_server/")
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("读取配置文件失败: %w", err)
@@ -149,20 +178,7 @@ func initLogger() error {
logLevel = slog.LevelInfo
}
// 创建日志处理
var handler slog.Handler
if config.Logging.Format == "json" {
handler = slog.NewJSONHandler(logFile, &slog.HandlerOptions{
Level: logLevel,
})
} else {
handler = slog.NewTextHandler(logFile, &slog.HandlerOptions{
Level: logLevel,
})
}
// 同时输出到控制台和文件
multiHandler := slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: logLevel})
// 创建日志器,同时输出到控制台和文件
logger = slog.New(slog.NewTextHandler(
io.MultiWriter(os.Stdout, logFile),
&slog.HandlerOptions{Level: logLevel},
@@ -225,7 +241,7 @@ func loggingMiddleware(next http.Handler) http.Handler {
wrappedWriter := &responseWriter{
ResponseWriter: w,
statusCode: http.StatusOK,
statusCode: http.StatusOK,
}
next.ServeHTTP(wrappedWriter, r)
@@ -272,9 +288,9 @@ func healthCheckHandler(w http.ResponseWriter, r *http.Request) {
}
response := map[string]interface{}{
"status": "ok",
"version": "1.0.0",
"timestamp": time.Now().Unix(),
"status": "ok",
"version": "1.0.0",
"timestamp": time.Now().Unix(),
"openai_health": oaiHealthy,
}
@@ -296,7 +312,7 @@ func submitHandler(w http.ResponseWriter, r *http.Request) {
// 解析请求体
var mcpRequest MCPRequest
if err := json.NewDecoder(r.Body).Decode(&mcpRequest); err != nil {
logger.Error("解析请求体失败", slog.String("request_id", requestID), slog.Error(err))
logger.Error("解析请求体失败", slog.String("request_id", requestID), slog.Any("error", err))
http.Error(w, "无效的请求体", http.StatusBadRequest)
requestCounter.WithLabelValues("/mcp/v1/submit", "400").Inc()
return
@@ -310,7 +326,7 @@ func submitHandler(w http.ResponseWriter, r *http.Request) {
// 调用OpenAI API进行分析
aIResult, err := analyzeWithOpenAI(r.Context(), mcpRequest, requestID)
if err != nil {
logger.Error("OpenAI API调用失败", slog.String("request_id", requestID), slog.Error(err))
logger.Error("OpenAI API调用失败", slog.String("request_id", requestID), slog.Any("error", err))
http.Error(w, "AI分析失败", http.StatusInternalServerError)
requestCounter.WithLabelValues("/mcp/v1/submit", "500").Inc()
return
@@ -329,7 +345,7 @@ func submitHandler(w http.ResponseWriter, r *http.Request) {
// 发送响应
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(response); err != nil {
logger.Error("发送响应失败", slog.String("request_id", requestID), slog.Error(err))
logger.Error("发送响应失败", slog.String("request_id", requestID), slog.Any("error", err))
}
requestCounter.WithLabelValues("/mcp/v1/submit", "200").Inc()
@@ -371,7 +387,7 @@ func analyzeWithOpenAI(ctx context.Context, request MCPRequest, requestID string
Content: prompt,
},
},
Temperature: config.OpenAI.Temperature,
Temperature: float32(config.OpenAI.Temperature),
MaxTokens: config.OpenAI.MaxTokens,
}
@@ -445,7 +461,7 @@ func main() {
// 启动服务器
go func() {
if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
logger.Error("服务器启动失败", slog.Error(err))
logger.Error("服务器启动失败", slog.Any("error", err))
os.Exit(1)
}
}()
@@ -465,9 +481,9 @@ func main() {
// 优雅关闭服务器
if err := srv.Shutdown(ctx); err != nil {
logger.Error("服务器关闭失败", slog.Error(err))
logger.Error("服务器关闭失败", slog.Any("error", err))
os.Exit(1)
}
logger.Info("MCP服务器已安全关闭")
}
}