1
0

feat: 初始化 AI Gateway 项目

实现支持 OpenAI 和 Anthropic 双协议的统一大模型 API 网关 MVP 版本,包含:
- OpenAI 和 Anthropic 协议代理
- 供应商和模型管理
- 用量统计
- 前端配置界面
This commit is contained in:
2026-04-15 16:53:28 +08:00
commit 915b004924
53 changed files with 5662 additions and 0 deletions

View File

@@ -0,0 +1,234 @@
package anthropic
import (
"encoding/json"
"fmt"
"nex/backend/internal/protocol/openai"
)
// ConvertRequest maps an Anthropic Messages API request onto the OpenAI
// chat-completions request shape. Parameters with no OpenAI equivalent
// (e.g. top_k) are dropped.
func ConvertRequest(anthropicReq *MessagesRequest) (*openai.ChatCompletionRequest, error) {
	out := &openai.ChatCompletionRequest{
		Model:       anthropicReq.Model,
		Temperature: anthropicReq.Temperature,
		TopP:        anthropicReq.TopP,
		Stream:      anthropicReq.Stream,
	}

	// max_tokens is mandatory on the Anthropic side; default to 4096 when
	// the caller supplied no positive value.
	maxTokens := anthropicReq.MaxTokens
	if maxTokens <= 0 {
		maxTokens = 4096
	}
	out.MaxTokens = &maxTokens

	// stop_sequences → stop.
	if len(anthropicReq.StopSequences) > 0 {
		out.Stop = anthropicReq.StopSequences
	}

	// The system prompt becomes a leading system message, followed by the
	// converted conversation (one Anthropic message may expand into
	// several OpenAI messages).
	msgs := make([]openai.Message, 0, len(anthropicReq.Messages)+1)
	if anthropicReq.System != "" {
		msgs = append(msgs, openai.Message{Role: "system", Content: anthropicReq.System})
	}
	for _, m := range anthropicReq.Messages {
		converted, err := convertMessage(m)
		if err != nil {
			return nil, err
		}
		msgs = append(msgs, converted...)
	}
	out.Messages = msgs

	// Tools map 1:1 onto OpenAI function tools.
	if n := len(anthropicReq.Tools); n > 0 {
		tools := make([]openai.Tool, 0, n)
		for _, t := range anthropicReq.Tools {
			tools = append(tools, openai.Tool{
				Type: "function",
				Function: openai.FunctionDefinition{
					Name:        t.Name,
					Description: t.Description,
					Parameters:  t.InputSchema,
				},
			})
		}
		out.Tools = tools
	}

	// tool_choice may be a string or an object; convertToolChoice handles
	// both forms.
	if anthropicReq.ToolChoice != nil {
		tc, err := convertToolChoice(anthropicReq.ToolChoice)
		if err != nil {
			return nil, err
		}
		out.ToolChoice = tc
	}
	return out, nil
}
// ConvertResponse maps an OpenAI chat-completion response onto the
// Anthropic Messages API response shape. Only the first choice is used.
func ConvertResponse(openaiResp *openai.ChatCompletionResponse) (*MessagesResponse, error) {
	anthropicResp := &MessagesResponse{
		ID:    openaiResp.ID,
		Type:  "message",
		Role:  "assistant",
		Model: openaiResp.Model,
		Usage: Usage{
			InputTokens:  openaiResp.Usage.PromptTokens,
			OutputTokens: openaiResp.Usage.CompletionTokens,
		},
	}
	if len(openaiResp.Choices) > 0 {
		choice := openaiResp.Choices[0]
		content := make([]ContentBlock, 0)
		if choice.Message != nil {
			// Text content. Message.Content is interface{}; only non-empty
			// string content maps to a text block. (The original also
			// compared the interface value against "" first, which the type
			// assertion already covers — that redundant check is removed.)
			// Array-form/multimodal content is silently skipped in the MVP.
			if str, ok := choice.Message.Content.(string); ok && str != "" {
				content = append(content, ContentBlock{
					Type: "text",
					Text: str,
				})
			}
			// Tool calls become tool_use blocks; OpenAI delivers the
			// arguments as a JSON string which must be decoded here.
			for _, tc := range choice.Message.ToolCalls {
				var input interface{}
				if err := json.Unmarshal([]byte(tc.Function.Arguments), &input); err != nil {
					return nil, fmt.Errorf("解析 tool_call arguments 失败: %w", err)
				}
				content = append(content, ContentBlock{
					Type:  "tool_use",
					ID:    tc.ID,
					Name:  tc.Function.Name,
					Input: input,
				})
			}
		}
		anthropicResp.Content = content
		// finish_reason → stop_reason. Unknown reasons leave StopReason
		// empty, matching the original behavior.
		switch choice.FinishReason {
		case "stop":
			anthropicResp.StopReason = "end_turn"
		case "tool_calls":
			anthropicResp.StopReason = "tool_use"
		case "length":
			anthropicResp.StopReason = "max_tokens"
		}
	}
	return anthropicResp, nil
}
// convertMessage expands one Anthropic message into the equivalent OpenAI
// message(s). A single Anthropic message may contain several content
// blocks, each of which becomes its own OpenAI message.
func convertMessage(msg AnthropicMessage) ([]openai.Message, error) {
	var messages []openai.Message
	for _, block := range msg.Content {
		switch block.Type {
		case "text":
			messages = append(messages, openai.Message{
				Role:    msg.Role,
				Content: block.Text,
			})
		case "tool_use":
			// Assistant turns in a tool conversation replay earlier tool
			// invocations. The original code rejected these via the default
			// branch, which broke any multi-turn tool-use conversation.
			// Map them to an assistant message carrying tool_calls; the
			// OpenAI side requires the arguments as a JSON string.
			args, err := json.Marshal(block.Input)
			if err != nil {
				return nil, fmt.Errorf("序列化 tool_use input 失败: %w", err)
			}
			messages = append(messages, openai.Message{
				Role: msg.Role,
				ToolCalls: []openai.ToolCall{{
					ID:   block.ID,
					Type: "function",
					Function: openai.FunctionCall{
						Name:      block.Name,
						Arguments: string(args),
					},
				}},
			})
		case "tool_result":
			// Tool results become role="tool" messages. String content is
			// passed through; arrays/objects are re-serialized to JSON.
			content := ""
			if str, ok := block.Content.(string); ok {
				content = str
			} else {
				raw, err := json.Marshal(block.Content)
				if err != nil {
					return nil, fmt.Errorf("序列化 tool_result 内容失败: %w", err)
				}
				content = string(raw)
			}
			messages = append(messages, openai.Message{
				Role:       "tool",
				Content:    content,
				ToolCallID: block.ToolUseID,
			})
		case "image":
			// Multimodal input is out of scope for the MVP.
			return nil, fmt.Errorf("MVP 不支持多模态内容(图片)")
		default:
			return nil, fmt.Errorf("未知的内容块类型: %s", block.Type)
		}
	}
	// Guard: a message with no content blocks still yields one empty
	// message so role alternation is preserved (should not normally happen).
	if len(messages) == 0 {
		messages = append(messages, openai.Message{
			Role:    msg.Role,
			Content: "",
		})
	}
	return messages, nil
}
// convertToolChoice maps an Anthropic tool_choice (either a string or an
// object) onto its OpenAI equivalent.
func convertToolChoice(choice interface{}) (interface{}, error) {
	switch v := choice.(type) {
	case string:
		// Both "auto" and "any" collapse to OpenAI's "auto".
		if v == "auto" || v == "any" {
			return "auto", nil
		}
		return nil, fmt.Errorf("无效的 tool_choice 字符串: %s", v)
	case map[string]interface{}:
		kind, ok := v["type"].(string)
		if !ok {
			return nil, fmt.Errorf("tool_choice 对象缺少 type 字段")
		}
		switch kind {
		case "auto", "any":
			return "auto", nil
		case "tool":
			// A forced tool selection maps to OpenAI's function form.
			name, ok := v["name"].(string)
			if !ok {
				return nil, fmt.Errorf("tool_choice type=tool 缺少 name 字段")
			}
			return map[string]interface{}{
				"type": "function",
				"function": map[string]string{
					"name": name,
				},
			}, nil
		default:
			return nil, fmt.Errorf("无效的 tool_choice type: %s", kind)
		}
	default:
		return nil, fmt.Errorf("tool_choice 格式无效")
	}
}

View File

@@ -0,0 +1,164 @@
package anthropic
import (
"encoding/json"
"fmt"
"nex/backend/internal/protocol/openai"
)
// StreamConverter converts an OpenAI streaming response into the Anthropic
// SSE event sequence. It is stateful: one instance tracks exactly one
// stream from message_start through message_stop.
type StreamConverter struct {
	messageID      string         // id echoed into message_start
	model          string         // model name echoed into message_start
	index          int            // index of the current text content block
	toolCallArgs   map[int]string // accumulated arguments JSON per tool_call block index
	sentStart      bool           // whether message_start has been emitted
	sentBlockStart map[int]bool   // whether content_block_start was emitted, per block index
}
// NewStreamConverter returns a StreamConverter bound to the given message
// ID and model name, with all per-stream bookkeeping reset.
func NewStreamConverter(messageID, model string) *StreamConverter {
	// index and sentStart rely on their zero values (0 / false).
	return &StreamConverter{
		messageID:      messageID,
		model:          model,
		toolCallArgs:   map[int]string{},
		sentBlockStart: map[int]bool{},
	}
}
// ConvertChunk translates one OpenAI streaming chunk into the Anthropic
// SSE event sequence (message_start, content_block_*, message_delta,
// message_stop).
//
// Fixes over the original version:
//   - continuation deltas of the same tool call (which carry an empty
//     tc.ID) no longer open a brand-new content block with a blank ID
//     (previously toolIndex = index + len(toolCallArgs) shifted on every
//     chunk after the first);
//   - a tool_use block can no longer collide with the text block's index;
//   - content_block_stop events are emitted in ascending index order
//     (Go map iteration order is random).
func (c *StreamConverter) ConvertChunk(chunk *openai.StreamChunk) ([]StreamEvent, error) {
	var events []StreamEvent

	// message_start is emitted exactly once per stream.
	if !c.sentStart {
		events = append(events, StreamEvent{
			Type: "message_start",
			Message: &MessagesResponse{
				ID:      c.messageID,
				Type:    "message",
				Role:    "assistant",
				Model:   c.model,
				Content: []ContentBlock{},
				Usage: Usage{
					InputTokens:  0,
					OutputTokens: 0,
				},
			},
		})
		c.sentStart = true
	}

	for _, choice := range chunk.Choices {
		// Text deltas accumulate in the block at c.index.
		// NOTE(review): assumes providers emit text before tool calls; a
		// stream that opens with tool calls and adds text afterwards would
		// reuse a tool block's index (the original had the same limitation).
		if choice.Delta.Content != "" {
			if !c.sentBlockStart[c.index] {
				events = append(events, StreamEvent{
					Type:         "content_block_start",
					Index:        c.index,
					ContentBlock: &ContentBlock{Type: "text"},
				})
				c.sentBlockStart[c.index] = true
			}
			events = append(events, StreamEvent{
				Type:  "content_block_delta",
				Index: c.index,
				Delta: &Delta{Type: "text_delta", Text: choice.Delta.Content},
			})
		}

		for _, tc := range choice.Delta.ToolCalls {
			toolIndex, open := c.lastToolIndex()
			// A non-empty ID marks the first delta of a new tool call;
			// continuation deltas carry an empty ID and extend the most
			// recently opened tool block.
			if tc.ID != "" || !open {
				toolIndex = len(c.sentBlockStart) // next unused block index
				events = append(events, StreamEvent{
					Type:  "content_block_start",
					Index: toolIndex,
					ContentBlock: &ContentBlock{
						Type: "tool_use",
						ID:   tc.ID,
						Name: tc.Function.Name,
					},
				})
				c.sentBlockStart[toolIndex] = true
				c.toolCallArgs[toolIndex] = ""
			}
			// Accumulate the partial arguments JSON and forward the delta.
			c.toolCallArgs[toolIndex] += tc.Function.Arguments
			events = append(events, StreamEvent{
				Type:  "content_block_delta",
				Index: toolIndex,
				Delta: &Delta{Type: "input_json_delta", Input: tc.Function.Arguments},
			})
		}

		if choice.FinishReason != "" {
			// Close every opened block, lowest index first. Indices are
			// contiguous because each new block takes len(sentBlockStart).
			total := len(c.sentBlockStart)
			for idx := 0; idx < total; idx++ {
				if c.sentBlockStart[idx] {
					events = append(events, StreamEvent{Type: "content_block_stop", Index: idx})
				}
			}

			// finish_reason → stop_reason; unknown reasons map to "".
			stopReason := ""
			switch choice.FinishReason {
			case "stop":
				stopReason = "end_turn"
			case "tool_calls":
				stopReason = "tool_use"
			case "length":
				stopReason = "max_tokens"
			}
			events = append(events, StreamEvent{
				Type:  "message_delta",
				Delta: &Delta{StopReason: stopReason},
			})
			events = append(events, StreamEvent{Type: "message_stop"})
		}
	}
	return events, nil
}

// lastToolIndex reports the highest block index currently holding a
// tool_use block, and whether any tool block has been opened yet.
func (c *StreamConverter) lastToolIndex() (int, bool) {
	idx, found := 0, false
	for k := range c.toolCallArgs {
		if !found || k > idx {
			idx, found = k, true
		}
	}
	return idx, found
}
// SerializeEvent renders a stream event as one SSE frame:
// "event: <type>\ndata: <json>\n\n".
func SerializeEvent(event StreamEvent) (string, error) {
	payload, err := json.Marshal(event)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("event: %s\ndata: %s\n\n", event.Type, payload), nil
}

View File

@@ -0,0 +1,118 @@
package anthropic
import (
	"encoding/json"
	"fmt"
)
// MessagesRequest is the request body of the Anthropic Messages API.
type MessagesRequest struct {
	Model         string                 `json:"model"`
	Messages      []AnthropicMessage     `json:"messages"`
	System        string                 `json:"system,omitempty"`
	MaxTokens     int                    `json:"max_tokens"` // required by Anthropic
	Temperature   *float64               `json:"temperature,omitempty"`
	TopP          *float64               `json:"top_p,omitempty"`
	TopK          *int                   `json:"top_k,omitempty"`
	StopSequences []string               `json:"stop_sequences,omitempty"`
	Stream        bool                   `json:"stream,omitempty"`
	Tools         []AnthropicTool        `json:"tools,omitempty"`
	ToolChoice    interface{}            `json:"tool_choice,omitempty"` // string or object
	Metadata      map[string]interface{} `json:"metadata,omitempty"`
}

// AnthropicMessage is one conversation turn; its content is a list of
// typed blocks rather than a single string.
type AnthropicMessage struct {
	Role    string         `json:"role"`
	Content []ContentBlock `json:"content"`
}
// ContentBlock is a single content block inside a message. Type selects
// which of the optional fields below are meaningful.
type ContentBlock struct {
	Type  string      `json:"type"` // "text", "image", "tool_use", "tool_result"
	Text  string      `json:"text,omitempty"`
	Input interface{} `json:"input,omitempty"` // tool_use: decoded input arguments
	// tool_use fields
	ID   string `json:"id,omitempty"`
	Name string `json:"name,omitempty"`
	// tool_result fields
	ToolUseID string      `json:"tool_use_id,omitempty"`
	Content   interface{} `json:"content,omitempty"` // string or array
	// multimodal fields (not supported by the MVP)
	Source interface{} `json:"source,omitempty"` // image payload descriptor
}
// AnthropicTool is an Anthropic tool definition.
type AnthropicTool struct {
	Name        string                 `json:"name"`
	Description string                 `json:"description,omitempty"`
	InputSchema map[string]interface{} `json:"input_schema"` // JSON schema of the tool input
}

// ToolChoice is the object form of tool_choice.
type ToolChoice struct {
	Type string `json:"type"`           // "auto", "any", "tool"
	Name string `json:"name,omitempty"` // set when Type == "tool"
}
// MessagesResponse is the response body of the Anthropic Messages API.
type MessagesResponse struct {
	ID           string         `json:"id"`
	Type         string         `json:"type"` // "message"
	Role         string         `json:"role"` // "assistant"
	Content      []ContentBlock `json:"content"`
	Model        string         `json:"model"`
	StopReason   string         `json:"stop_reason,omitempty"` // "end_turn", "max_tokens", "stop_sequence", "tool_use"
	StopSequence string         `json:"stop_sequence,omitempty"`
	Usage        Usage          `json:"usage"`
}

// Usage reports token consumption in Anthropic terms.
type Usage struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
}
// StreamEvent is one Anthropic SSE event; Type determines which optional
// fields are populated.
type StreamEvent struct {
	Type         string            `json:"type"`
	Message      *MessagesResponse `json:"message,omitempty"`       // message_start
	Index        int               `json:"index,omitempty"`         // content_block_* events
	ContentBlock *ContentBlock     `json:"content_block,omitempty"` // content_block_start
	Delta        *Delta            `json:"delta,omitempty"`         // content_block_delta / message_delta
}

// Delta is the incremental payload carried by delta events.
type Delta struct {
	Type       string `json:"type,omitempty"` // "text_delta", "input_json_delta"
	Text       string `json:"text,omitempty"`
	Input      string `json:"input,omitempty"`       // partial JSON of a tool_use input
	StopReason string `json:"stop_reason,omitempty"` // message_delta
	Usage      *Usage `json:"usage,omitempty"`       // message_delta
}
// ErrorResponse is the Anthropic-format error envelope.
type ErrorResponse struct {
	Type  string      `json:"type"` // always "error"
	Error ErrorDetail `json:"error"`
}

// ErrorDetail describes one error.
type ErrorDetail struct {
	Type    string `json:"type"` // e.g. "invalid_request_error", "authentication_error"
	Message string `json:"message"`
}
// ParseInputJSON normalizes a tool_use block's Input into a map. Input may
// arrive either as a JSON-encoded string or as an already-decoded object;
// any other type is rejected.
func (cb *ContentBlock) ParseInputJSON() (map[string]interface{}, error) {
	switch v := cb.Input.(type) {
	case string:
		var result map[string]interface{}
		err := json.Unmarshal([]byte(v), &result)
		return result, err
	case map[string]interface{}:
		// Already decoded — return as-is.
		return v, nil
	default:
		// The original returned json.Unmarshal([]byte{}, nil) as a
		// roundabout way to produce an error; report the problem directly.
		return nil, fmt.Errorf("tool_use input 类型无效: %T", cb.Input)
	}
}

View File

@@ -0,0 +1,86 @@
package openai
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
)
// Adapter is the OpenAI protocol adapter. It is a pass-through: requests
// and responses keep the OpenAI wire format end to end.
type Adapter struct{}

// NewAdapter creates an OpenAI adapter.
func NewAdapter() *Adapter {
	return &Adapter{}
}
// PrepareRequest builds the outbound HTTP request for the provider
// (pass-through: the body is the serialized OpenAI-format request).
// baseURL must already contain the version path (e.g. /v1 or /v4); only
// the endpoint path is appended here.
func (a *Adapter) PrepareRequest(req *ChatCompletionRequest, apiKey, baseURL string) (*http.Request, error) {
	body, err := json.Marshal(req)
	if err != nil {
		return nil, err
	}
	// Debug log: report only the payload size. The original printed the
	// entire body, leaking prompts and tool arguments to stdout; if verbose
	// request logging is ever needed, use a real logger with redaction.
	fmt.Printf("[DEBUG] 请求Body: %d bytes\n", len(body))
	httpReq, err := http.NewRequest("POST", baseURL+"/chat/completions", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	httpReq.Header.Set("Content-Type", "application/json")
	httpReq.Header.Set("Authorization", "Bearer "+apiKey)
	return httpReq, nil
}
// ParseResponse decodes a provider response body into a
// ChatCompletionResponse (pass-through).
func (a *Adapter) ParseResponse(resp *http.Response) (*ChatCompletionResponse, error) {
	// Close the body on every path. The original deferred Close only after
	// a successful ReadAll, leaking the connection on read errors.
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	var result ChatCompletionResponse
	if err := json.Unmarshal(body, &result); err != nil {
		return nil, err
	}
	return &result, nil
}
// ParseErrorResponse decodes a provider error body into an ErrorResponse.
func (a *Adapter) ParseErrorResponse(resp *http.Response) (*ErrorResponse, error) {
	// Close the body on every path. The original deferred Close only after
	// a successful ReadAll, leaking the connection on read errors.
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	var result ErrorResponse
	if err := json.Unmarshal(body, &result); err != nil {
		return nil, err
	}
	return &result, nil
}
// ParseStreamChunk decodes one SSE data payload into a StreamChunk.
func (a *Adapter) ParseStreamChunk(data []byte) (*StreamChunk, error) {
	var chunk StreamChunk
	if err := json.Unmarshal(data, &chunk); err != nil {
		return nil, err
	}
	return &chunk, nil
}

View File

@@ -0,0 +1,131 @@
package openai
import "encoding/json"
// ChatCompletionRequest is the OpenAI Chat Completions API request body.
type ChatCompletionRequest struct {
	Model            string      `json:"model"`
	Messages         []Message   `json:"messages"`
	Temperature      *float64    `json:"temperature,omitempty"`
	MaxTokens        *int        `json:"max_tokens,omitempty"`
	TopP             *float64    `json:"top_p,omitempty"`
	FrequencyPenalty *float64    `json:"frequency_penalty,omitempty"`
	PresencePenalty  *float64    `json:"presence_penalty,omitempty"`
	Stop             interface{} `json:"stop,omitempty"` // string or []string
	N                *int        `json:"n,omitempty"`
	Stream           bool        `json:"stream,omitempty"`
	Tools            []Tool      `json:"tools,omitempty"`
	ToolChoice       interface{} `json:"tool_choice,omitempty"` // string or object
	User             string      `json:"user,omitempty"`
}
// Message is one OpenAI chat message.
type Message struct {
	Role       string      `json:"role"`
	Content    interface{} `json:"content"` // string, or array for multimodal (unsupported in the MVP)
	Name       string      `json:"name,omitempty"`
	ToolCalls  []ToolCall  `json:"tool_calls,omitempty"`
	ToolCallID string      `json:"tool_call_id,omitempty"` // set on role="tool" messages
}

// Tool is an OpenAI tool definition.
type Tool struct {
	Type     string             `json:"type"` // currently always "function"
	Function FunctionDefinition `json:"function"`
}

// FunctionDefinition describes a callable function exposed as a tool.
type FunctionDefinition struct {
	Name        string                 `json:"name"`
	Description string                 `json:"description,omitempty"`
	Parameters  map[string]interface{} `json:"parameters,omitempty"` // JSON schema
}
// ToolCall is one tool invocation emitted by the model.
type ToolCall struct {
	ID       string       `json:"id"`
	Type     string       `json:"type"` // "function"
	Function FunctionCall `json:"function"`
}

// FunctionCall names the function and carries its arguments.
type FunctionCall struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // JSON-encoded string
}
// ChatCompletionResponse is the OpenAI Chat Completions API response body.
type ChatCompletionResponse struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int64    `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	Usage   Usage    `json:"usage"`
}

// Choice is one completion alternative. Message is populated for
// non-streaming responses, Delta for streaming ones.
type Choice struct {
	Index        int      `json:"index"`
	Message      *Message `json:"message,omitempty"`
	Delta        *Delta   `json:"delta,omitempty"` // streaming responses only
	FinishReason string   `json:"finish_reason"`
}

// Delta is the incremental payload of one streaming chunk.
type Delta struct {
	Role      string     `json:"role,omitempty"`
	Content   string     `json:"content,omitempty"`
	ToolCalls []ToolCall `json:"tool_calls,omitempty"`
}

// Usage reports token consumption for the request.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
// StreamChunk is one chunk of a streaming (SSE) response.
type StreamChunk struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"`
	Model   string         `json:"model"`
	Choices []StreamChoice `json:"choices"`
}

// StreamChoice is one choice within a streaming chunk.
type StreamChoice struct {
	Index        int    `json:"index"`
	Delta        Delta  `json:"delta"`
	FinishReason string `json:"finish_reason,omitempty"`
}
// ErrorResponse is the OpenAI-format error envelope.
type ErrorResponse struct {
	Error ErrorDetail `json:"error"`
}

// ErrorDetail describes one error.
type ErrorDetail struct {
	Message string `json:"message"`
	Type    string `json:"type,omitempty"`
	Code    string `json:"code,omitempty"`
}
// ParseToolCallArguments decodes the tool call's JSON-string arguments
// into a map.
func (tc *ToolCall) ParseToolCallArguments() (map[string]interface{}, error) {
	var parsed map[string]interface{}
	err := json.Unmarshal([]byte(tc.Function.Arguments), &parsed)
	return parsed, err
}
// SerializeToolCallArguments encodes a tool call's arguments map as the
// JSON string form expected on the OpenAI wire format.
func SerializeToolCallArguments(args map[string]interface{}) (string, error) {
	encoded, err := json.Marshal(args)
	if err != nil {
		return "", err
	}
	return string(encoded), nil
}