forked from DevOps/deploy.stack
feat: initial Go implementation of the MCP server
Add the core functionality of the Go version of the MCP server, including:
- configuration file management
- logging system
- OpenAI API integration
- HTTP server and API endpoints
- health checks and monitoring
- install scripts and documentation
mcp_server_go/main.go (473 lines, normal file)
@@ -0,0 +1,473 @@
package main

import (
    "context"
    "encoding/json"
    "fmt"
    "io"
    "log/slog"
    "net/http"
    "os"
    "os/signal"
    "path/filepath"
    "strings"
    "syscall"
    "time"

    "github.com/fsnotify/fsnotify"
    "github.com/google/uuid"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promhttp"
    "github.com/spf13/viper"
    "golang.org/x/time/rate"

    openai "github.com/sashabaranov/go-openai"
)

// Config holds the full server configuration loaded from the config file.
type Config struct {
    Server   ServerConfig   `mapstructure:"server"`
    OpenAI   OpenAIConfig   `mapstructure:"openai"`
    Logging  LoggingConfig  `mapstructure:"logging"`
    Security SecurityConfig `mapstructure:"security"`
}

// ServerConfig holds the HTTP server settings.
type ServerConfig struct {
    ListenAddr     string `mapstructure:"listen_addr"`
    ReadTimeout    int    `mapstructure:"read_timeout"`
    WriteTimeout   int    `mapstructure:"write_timeout"`
    MaxHeaderBytes int    `mapstructure:"max_header_bytes"`
}

// OpenAIConfig holds the OpenAI API settings.
type OpenAIConfig struct {
    APIKey         string  `mapstructure:"api_key"`
    BaseURL        string  `mapstructure:"base_url"`
    Model          string  `mapstructure:"model"`
    Temperature    float64 `mapstructure:"temperature"`
    MaxTokens      int     `mapstructure:"max_tokens"`
    RequestTimeout int     `mapstructure:"request_timeout"`
}

// LoggingConfig holds the logging settings.
type LoggingConfig struct {
    Level      string `mapstructure:"level"`
    Format     string `mapstructure:"format"`
    OutputPath string `mapstructure:"output_path"`
    MaxSize    int    `mapstructure:"max_size"`
    MaxAge     int    `mapstructure:"max_age"`
    MaxBackups int    `mapstructure:"max_backups"`
    Compress   bool   `mapstructure:"compress"`
}

// SecurityConfig holds the CORS settings.
type SecurityConfig struct {
    AllowedOrigins []string `mapstructure:"allowed_origins"`
    AllowedMethods []string `mapstructure:"allowed_methods"`
    AllowedHeaders []string `mapstructure:"allowed_headers"`
}
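
// An illustrative config.toml matching the mapstructure tags above. The keys
// mirror the structs in this file; the values are examples only and are not
// part of this commit.
//
//    [server]
//    listen_addr      = ":8080"
//    read_timeout     = 15
//    write_timeout    = 30
//    max_header_bytes = 1048576
//
//    [openai]
//    api_key         = "sk-..."
//    base_url        = ""
//    model           = "gpt-3.5-turbo"
//    temperature     = 0.2
//    max_tokens      = 1024
//    request_timeout = 30
//
//    [logging]
//    level       = "info"
//    format      = "json"
//    output_path = "/var/log/mcp_server/server.log"
//    max_size    = 100
//    max_age     = 7
//    max_backups = 3
//    compress    = true
//
//    [security]
//    allowed_origins = ["https://example.com"]
//    allowed_methods = ["GET", "POST", "OPTIONS"]
//    allowed_headers = ["Content-Type", "Authorization"]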

// MCPRequest is the request payload accepted by the MCP endpoints.
type MCPRequest struct {
    Data      interface{}       `json:"data"`
    Type      string            `json:"type"`
    Metadata  map[string]string `json:"metadata,omitempty"`
    Timestamp int64             `json:"timestamp"`
}

// MCPResponse is the response payload returned by the MCP endpoints.
type MCPResponse struct {
    Success   bool        `json:"success"`
    Message   string      `json:"message,omitempty"`
    Data      interface{} `json:"data,omitempty"`
    AIResult  interface{} `json:"ai_result,omitempty"`
    RequestID string      `json:"request_id,omitempty"`
    Timestamp int64       `json:"timestamp"`
}

var (
    config       Config
    logger       *slog.Logger
    openaiClient *openai.Client

    // Rate limiter: 10 requests per second with a burst of 20.
    limiter = rate.NewLimiter(rate.Limit(10), 20)

    // Prometheus metrics.
    requestCounter = prometheus.NewCounterVec(
        prometheus.CounterOpts{
            Name: "mcp_server_requests_total",
            Help: "Total number of MCP server requests",
        },
        []string{"endpoint", "status"},
    )
    requestDuration = prometheus.NewHistogramVec(
        prometheus.HistogramOpts{
            Name:    "mcp_server_request_duration_seconds",
            Help:    "Duration of MCP server requests in seconds",
            Buckets: prometheus.DefBuckets,
        },
        []string{"endpoint"},
    )
)

func init() {
    // Register the Prometheus metrics.
    prometheus.MustRegister(requestCounter)
    prometheus.MustRegister(requestDuration)
}

// loadConfig reads the configuration file and sets up hot reloading.
func loadConfig() error {
    viper.SetConfigName("config")
    viper.SetConfigType("toml")
    viper.AddConfigPath(".")
    viper.AddConfigPath("/etc/mcp_server/")
    viper.AddConfigPath("$HOME/.mcp_server/")

    if err := viper.ReadInConfig(); err != nil {
        return fmt.Errorf("failed to read config file: %w", err)
    }

    if err := viper.Unmarshal(&config); err != nil {
        return fmt.Errorf("failed to parse config file: %w", err)
    }

    // Hot-reload the configuration when the file changes.
    viper.WatchConfig()
    viper.OnConfigChange(func(e fsnotify.Event) {
        logger.Info("config file changed, reloading", "file", e.Name)
        if err := viper.Unmarshal(&config); err != nil {
            logger.Error("failed to re-parse config file", "error", err)
        }
        // Re-initialize the OpenAI client with the new settings.
        initOpenAIClient()
    })

    return nil
}

// initLogger sets up structured logging to both stdout and a log file.
func initLogger() error {
    // Create the log directory.
    logDir := filepath.Dir(config.Logging.OutputPath)
    if err := os.MkdirAll(logDir, 0755); err != nil {
        return fmt.Errorf("failed to create log directory: %w", err)
    }

    // Open the log file.
    logFile, err := os.OpenFile(
        config.Logging.OutputPath,
        os.O_CREATE|os.O_WRONLY|os.O_APPEND,
        0644,
    )
    if err != nil {
        return fmt.Errorf("failed to open log file: %w", err)
    }

    // Resolve the log level.
    var logLevel slog.Level
    switch config.Logging.Level {
    case "debug":
        logLevel = slog.LevelDebug
    case "warn":
        logLevel = slog.LevelWarn
    case "error":
        logLevel = slog.LevelError
    default:
        logLevel = slog.LevelInfo
    }

    // Write to both the console and the log file.
    output := io.MultiWriter(os.Stdout, logFile)

    // Create the handler in the configured format.
    var handler slog.Handler
    if config.Logging.Format == "json" {
        handler = slog.NewJSONHandler(output, &slog.HandlerOptions{Level: logLevel})
    } else {
        handler = slog.NewTextHandler(output, &slog.HandlerOptions{Level: logLevel})
    }

    logger = slog.New(handler)
    slog.SetDefault(logger)

    return nil
}

// initOpenAIClient (re)creates the OpenAI client from the current config.
func initOpenAIClient() {
    cfg := openai.DefaultConfig(config.OpenAI.APIKey)
    if config.OpenAI.BaseURL != "" {
        cfg.BaseURL = config.OpenAI.BaseURL
    }
    openaiClient = openai.NewClientWithConfig(cfg)
}

// corsMiddleware sets CORS headers and handles preflight requests.
func corsMiddleware(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        // Echo the request origin only if it is in the allow list; setting the
        // header once per configured origin would only keep the last value.
        origin := r.Header.Get("Origin")
        for _, allowed := range config.Security.AllowedOrigins {
            if allowed == "*" || allowed == origin {
                w.Header().Set("Access-Control-Allow-Origin", allowed)
                break
            }
        }
        w.Header().Set("Access-Control-Allow-Methods", strings.Join(config.Security.AllowedMethods, ","))
        w.Header().Set("Access-Control-Allow-Headers", strings.Join(config.Security.AllowedHeaders, ","))
        w.Header().Set("Access-Control-Allow-Credentials", "true")

        // Handle preflight requests.
        if r.Method == http.MethodOptions {
            w.WriteHeader(http.StatusOK)
            return
        }

        next.ServeHTTP(w, r)
    })
}

// rateLimitMiddleware rejects requests that exceed the global rate limit.
func rateLimitMiddleware(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        if !limiter.Allow() {
            http.Error(w, "too many requests, please try again later", http.StatusTooManyRequests)
            return
        }
        next.ServeHTTP(w, r)
    })
}

// loggingMiddleware logs every request and its outcome.
func loggingMiddleware(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        startTime := time.Now()
        logger.Info("request received",
            slog.String("method", r.Method),
            slog.String("path", r.URL.Path),
            slog.String("remote_addr", r.RemoteAddr),
        )

        wrappedWriter := &responseWriter{
            ResponseWriter: w,
            statusCode:     http.StatusOK,
        }

        next.ServeHTTP(wrappedWriter, r)

        duration := time.Since(startTime)
        logger.Info("request completed",
            slog.String("method", r.Method),
            slog.String("path", r.URL.Path),
            slog.Int("status", wrappedWriter.statusCode),
            slog.Duration("duration", duration),
        )
    })
}

// responseWriter wraps http.ResponseWriter to capture the status code.
type responseWriter struct {
    http.ResponseWriter
    statusCode int
}

func (rw *responseWriter) WriteHeader(code int) {
    rw.statusCode = code
    rw.ResponseWriter.WriteHeader(code)
}

// healthCheckHandler reports server status and OpenAI connectivity.
func healthCheckHandler(w http.ResponseWriter, r *http.Request) {
    start := time.Now()
    defer func() {
        requestDuration.WithLabelValues("/health").Observe(time.Since(start).Seconds())
    }()

    w.Header().Set("Content-Type", "application/json")

    // Check OpenAI connectivity with a lightweight request.
    oaiHealthy := false
    if openaiClient != nil {
        ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
        defer cancel()

        _, err := openaiClient.ListModels(ctx)
        oaiHealthy = (err == nil)
    }

    response := map[string]interface{}{
        "status":        "ok",
        "version":       "1.0.0",
        "timestamp":     time.Now().Unix(),
        "openai_health": oaiHealthy,
    }

    json.NewEncoder(w).Encode(response)
    requestCounter.WithLabelValues("/health", "200").Inc()
}
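
// Illustrative request/response for the /mcp/v1/submit endpoint handled below.
// Field names follow MCPRequest/MCPResponse; the values are examples only:
//
//    request:  {"type": "metrics", "data": {"cpu_load": 0.93}, "metadata": {"host": "node-1"}}
//    response: {"success": true, "message": "data submitted successfully",
//               "data": {...}, "ai_result": "...", "request_id": "req-...", "timestamp": 1700000000}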

// submitHandler accepts MCP data, runs AI analysis, and returns the result.
func submitHandler(w http.ResponseWriter, r *http.Request) {
    start := time.Now()
    defer func() {
        requestDuration.WithLabelValues("/mcp/v1/submit").Observe(time.Since(start).Seconds())
    }()

    // Generate a request ID.
    requestID := fmt.Sprintf("req-%d-%s", time.Now().UnixNano(), uuid.New().String()[:8])
    logger.Info("handling MCP submit request", slog.String("request_id", requestID))

    // Parse the request body.
    var mcpRequest MCPRequest
    if err := json.NewDecoder(r.Body).Decode(&mcpRequest); err != nil {
        logger.Error("failed to parse request body", slog.String("request_id", requestID), slog.Any("error", err))
        http.Error(w, "invalid request body", http.StatusBadRequest)
        requestCounter.WithLabelValues("/mcp/v1/submit", "400").Inc()
        return
    }

    // Default the timestamp if the client did not set one.
    if mcpRequest.Timestamp == 0 {
        mcpRequest.Timestamp = time.Now().Unix()
    }

    // Analyze the data with the OpenAI API.
    aiResult, err := analyzeWithOpenAI(r.Context(), mcpRequest, requestID)
    if err != nil {
        logger.Error("OpenAI API call failed", slog.String("request_id", requestID), slog.Any("error", err))
        http.Error(w, "AI analysis failed", http.StatusInternalServerError)
        requestCounter.WithLabelValues("/mcp/v1/submit", "500").Inc()
        return
    }

    // Build the response.
    response := MCPResponse{
        Success:   true,
        Message:   "data submitted successfully",
        Data:      mcpRequest.Data,
        AIResult:  aiResult,
        RequestID: requestID,
        Timestamp: time.Now().Unix(),
    }

    // Send the response.
    w.Header().Set("Content-Type", "application/json")
    if err := json.NewEncoder(w).Encode(response); err != nil {
        logger.Error("failed to send response", slog.String("request_id", requestID), slog.Any("error", err))
    }

    requestCounter.WithLabelValues("/mcp/v1/submit", "200").Inc()
    logger.Info("MCP submit request completed", slog.String("request_id", requestID))
}

// analyzeWithOpenAI sends the request data to the OpenAI API for analysis.
func analyzeWithOpenAI(ctx context.Context, request MCPRequest, requestID string) (interface{}, error) {
    // Apply the configured request timeout.
    timeoutCtx, cancel := context.WithTimeout(ctx, time.Duration(config.OpenAI.RequestTimeout)*time.Second)
    defer cancel()

    // Serialize the request data for the prompt.
    dataJSON, err := json.Marshal(request.Data)
    if err != nil {
        return nil, fmt.Errorf("failed to serialize data: %w", err)
    }

    // Build the prompt.
    prompt := fmt.Sprintf(`Please analyze the following data and provide insights:

Data type: %s

Data content:
%s

Please provide a structured analysis.`, request.Type, string(dataJSON))

    // Build the chat completion request.
    chatReq := openai.ChatCompletionRequest{
        Model: config.OpenAI.Model,
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    openai.ChatMessageRoleSystem,
                Content: "You are a data analysis assistant responsible for analyzing and interpreting all kinds of data.",
            },
            {
                Role:    openai.ChatMessageRoleUser,
                Content: prompt,
            },
        },
        Temperature: float32(config.OpenAI.Temperature),
        MaxTokens:   config.OpenAI.MaxTokens,
    }

    // Call the OpenAI API.
    logger.Debug("calling OpenAI API", slog.String("request_id", requestID), slog.String("model", config.OpenAI.Model))
    resp, err := openaiClient.CreateChatCompletion(timeoutCtx, chatReq)
    if err != nil {
        return nil, fmt.Errorf("OpenAI API call failed: %w", err)
    }

    // Make sure the API returned at least one choice.
    if len(resp.Choices) == 0 {
        return nil, fmt.Errorf("OpenAI returned no result")
    }

    // Try to parse the response content as JSON; fall back to the raw text.
    var aiResult interface{}
    if err := json.Unmarshal([]byte(resp.Choices[0].Message.Content), &aiResult); err != nil {
        aiResult = resp.Choices[0].Message.Content
    }

    logger.Debug("OpenAI API call succeeded", slog.String("request_id", requestID), slog.Int("token_usage", resp.Usage.TotalTokens))

    return aiResult, nil
}

// main loads the config, wires up the HTTP server, and handles graceful shutdown.
func main() {
    // Load the configuration.
    if err := loadConfig(); err != nil {
        fmt.Fprintf(os.Stderr, "failed to load config: %v\n", err)
        os.Exit(1)
    }

    // Initialize logging.
    if err := initLogger(); err != nil {
        fmt.Fprintf(os.Stderr, "failed to initialize logging: %v\n", err)
        os.Exit(1)
    }

    // Initialize the OpenAI client.
    initOpenAIClient()

    logger.Info("starting MCP server", slog.String("listen_addr", config.Server.ListenAddr))

    // Create the router.
    r := http.NewServeMux()

    // Health check endpoint.
    r.HandleFunc("/health", healthCheckHandler)

    // MCP submit endpoint.
    r.HandleFunc("/mcp/v1/submit", submitHandler)

    // Prometheus metrics endpoint.
    r.Handle("/metrics", promhttp.Handler())

    // Build the middleware chain.
    handler := loggingMiddleware(rateLimitMiddleware(corsMiddleware(r)))

    // Create the HTTP server.
    srv := &http.Server{
        Addr:         config.Server.ListenAddr,
        Handler:      handler,
        ReadTimeout:  time.Duration(config.Server.ReadTimeout) * time.Second,
        WriteTimeout: time.Duration(config.Server.WriteTimeout) * time.Second,
        IdleTimeout:  120 * time.Second,
    }

    // Start the server.
    go func() {
        if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
            logger.Error("server failed to start", slog.Any("error", err))
            os.Exit(1)
        }
    }()

    logger.Info("MCP server started successfully")

    // Wait for an interrupt signal.
    quit := make(chan os.Signal, 1)
    signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
    <-quit

    logger.Info("MCP server shutting down...")

    // Shut down gracefully with a timeout.
    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()

    if err := srv.Shutdown(ctx); err != nil {
        logger.Error("server shutdown failed", slog.Any("error", err))
        os.Exit(1)
    }

    logger.Info("MCP server stopped cleanly")
}
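
For reference, a minimal client sketch (not part of this commit) that exercises the /mcp/v1/submit endpoint defined above. It assumes the server is reachable at localhost:8080; the actual address depends on the listen_addr value in the config file.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
    "time"
)

func main() {
    // Build a payload shaped like MCPRequest (field names follow its JSON tags).
    payload := map[string]interface{}{
        "type":      "metrics",
        "data":      map[string]interface{}{"cpu_load": 0.93, "host": "node-1"},
        "metadata":  map[string]string{"source": "example-client"},
        "timestamp": time.Now().Unix(),
    }
    body, err := json.Marshal(payload)
    if err != nil {
        panic(err)
    }

    // POST to the submit endpoint; localhost:8080 is an assumed listen address.
    resp, err := http.Post("http://localhost:8080/mcp/v1/submit", "application/json", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // Decode the fields of interest from the MCPResponse.
    var result struct {
        Success   bool        `json:"success"`
        AIResult  interface{} `json:"ai_result"`
        RequestID string      `json:"request_id"`
    }
    if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
        panic(err)
    }
    fmt.Printf("request %s: success=%v ai_result=%v\n", result.RequestID, result.Success, result.AIResult)
}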