1
0

feat: 实现分层架构,包含 domain、service、repository 和 pkg 层

- 新增 domain 层:model、provider、route、stats 实体
- 新增 service 层:models、providers、routing、stats 业务逻辑
- 新增 repository 层:models、providers、stats 数据访问
- 新增 pkg 工具包:errors、logger、validator
- 新增中间件:CORS、logging、recovery、request ID
- 新增数据库迁移:初始 schema 和索引
- 新增单元测试和集成测试
- 新增规范文档:config-management、database-migration、error-handling、layered-architecture、middleware-system、request-validation、structured-logging、test-coverage
- 移除 config 子包和 model_router(已迁移至分层架构)
This commit is contained in:
2026-04-16 00:47:20 +08:00
parent 915b004924
commit f18904af1e
77 changed files with 5727 additions and 1257 deletions

View File

@@ -0,0 +1,270 @@
package anthropic
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/protocol/openai"
)
// TestConvertRequest_Basic verifies that a minimal Anthropic request maps
// onto the OpenAI format: model, max_tokens, temperature and the single
// user message must all survive the conversion unchanged.
func TestConvertRequest_Basic(t *testing.T) {
	temperature := 0.7
	input := &MessagesRequest{
		Model:       "claude-3-opus",
		MaxTokens:   1024,
		Temperature: &temperature,
		Messages: []AnthropicMessage{{
			Role:    "user",
			Content: []ContentBlock{{Type: "text", Text: "Hello"}},
		}},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)

	assert.Equal(t, "claude-3-opus", got.Model)
	assert.Equal(t, 1024, *got.MaxTokens)
	assert.Equal(t, &temperature, got.Temperature)
	require.Len(t, got.Messages, 1)
	assert.Equal(t, "user", got.Messages[0].Role)
	assert.Equal(t, "Hello", got.Messages[0].Content)
}
// TestConvertRequest_WithSystem verifies that the Anthropic top-level
// System field is lowered into a leading OpenAI "system" role message,
// placed before the user message.
func TestConvertRequest_WithSystem(t *testing.T) {
	input := &MessagesRequest{
		Model:     "claude-3-opus",
		MaxTokens: 100,
		System:    "You are a helpful assistant.",
		Messages: []AnthropicMessage{{
			Role:    "user",
			Content: []ContentBlock{{Type: "text", Text: "Hi"}},
		}},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)

	// One synthesized system message plus the original user message.
	require.Len(t, got.Messages, 2)
	assert.Equal(t, "system", got.Messages[0].Role)
	assert.Equal(t, "You are a helpful assistant.", got.Messages[0].Content)
	assert.Equal(t, "user", got.Messages[1].Role)
}
// TestConvertRequest_DefaultMaxTokens verifies that an unset (zero)
// MaxTokens is replaced by the 4096 default during conversion.
func TestConvertRequest_DefaultMaxTokens(t *testing.T) {
	input := &MessagesRequest{
		Model:     "claude-3-opus",
		MaxTokens: 0, // not set by the caller
		Messages: []AnthropicMessage{
			{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
		},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)
	assert.Equal(t, 4096, *got.MaxTokens)
}
// TestConvertRequest_WithTools verifies that an Anthropic tool definition
// becomes an OpenAI "function"-type tool with the same name.
func TestConvertRequest_WithTools(t *testing.T) {
	input := &MessagesRequest{
		Model:     "claude-3-opus",
		MaxTokens: 100,
		Messages: []AnthropicMessage{
			{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
		},
		Tools: []AnthropicTool{{
			Name:        "get_weather",
			Description: "Get weather info",
			InputSchema: map[string]interface{}{"type": "object"},
		}},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)

	require.Len(t, got.Tools, 1)
	assert.Equal(t, "function", got.Tools[0].Type)
	assert.Equal(t, "get_weather", got.Tools[0].Function.Name)
}
// TestConvertRequest_WithStopSequences verifies that Anthropic
// stop_sequences are carried over verbatim into the OpenAI Stop field.
func TestConvertRequest_WithStopSequences(t *testing.T) {
	stops := []string{"STOP", "END"}
	input := &MessagesRequest{
		Model:         "claude-3-opus",
		MaxTokens:     100,
		StopSequences: stops,
		Messages: []AnthropicMessage{
			{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
		},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)
	assert.Equal(t, []string{"STOP", "END"}, got.Stop)
}
// TestConvertRequest_ToolResult verifies that an Anthropic tool_result
// content block is converted into an OpenAI "tool" role message carrying
// the original tool_use id as ToolCallID.
func TestConvertRequest_ToolResult(t *testing.T) {
	input := &MessagesRequest{
		Model:     "claude-3-opus",
		MaxTokens: 100,
		Messages: []AnthropicMessage{{
			Role: "user",
			Content: []ContentBlock{{
				Type:      "tool_result",
				ToolUseID: "tool_123",
				Content:   "result data",
			}},
		}},
	}

	got, err := ConvertRequest(input)
	require.NoError(t, err)

	require.Len(t, got.Messages, 1)
	msg := got.Messages[0]
	assert.Equal(t, "tool", msg.Role)
	assert.Equal(t, "tool_123", msg.ToolCallID)
	assert.Equal(t, "result data", msg.Content)
}
// TestConvertResponse verifies the happy-path OpenAI → Anthropic response
// mapping: id, role and usage carry over, "stop" becomes "end_turn", and
// the assistant text becomes a single text content block.
func TestConvertResponse(t *testing.T) {
	upstream := &openai.ChatCompletionResponse{
		ID:    "chatcmpl-123",
		Model: "gpt-4",
		Choices: []openai.Choice{{
			Index:        0,
			Message:      &openai.Message{Role: "assistant", Content: "Hello!"},
			FinishReason: "stop",
		}},
		Usage: openai.Usage{PromptTokens: 10, CompletionTokens: 5},
	}

	got, err := ConvertResponse(upstream)
	require.NoError(t, err)

	assert.Equal(t, "chatcmpl-123", got.ID)
	assert.Equal(t, "message", got.Type)
	assert.Equal(t, "assistant", got.Role)
	assert.Equal(t, "end_turn", got.StopReason)

	require.Len(t, got.Content, 1)
	assert.Equal(t, "text", got.Content[0].Type)
	assert.Equal(t, "Hello!", got.Content[0].Text)

	// Token accounting maps prompt→input and completion→output.
	assert.Equal(t, 10, got.Usage.InputTokens)
	assert.Equal(t, 5, got.Usage.OutputTokens)
}
func TestConvertResponse_ToolCalls(t *testing.T) {
args, _ := json.Marshal(map[string]interface{}{"city": "Beijing"})
resp := &openai.ChatCompletionResponse{
ID: "chatcmpl-456",
Model: "gpt-4",
Choices: []openai.Choice{
{
Index: 0,
Message: &openai.Message{
Role: "assistant",
ToolCalls: []openai.ToolCall{
{
ID: "call_123",
Type: "function",
Function: openai.FunctionCall{
Name: "get_weather",
Arguments: string(args),
},
},
},
},
FinishReason: "tool_calls",
},
},
Usage: openai.Usage{},
}
result, err := ConvertResponse(resp)
require.NoError(t, err)
assert.Equal(t, "tool_use", result.StopReason)
require.Len(t, result.Content, 1)
assert.Equal(t, "tool_use", result.Content[0].Type)
assert.Equal(t, "call_123", result.Content[0].ID)
assert.Equal(t, "get_weather", result.Content[0].Name)
}
// TestConvertToolChoice_String is a table-driven test for convertToolChoice:
// string inputs ("auto"/"any") normalize to "auto", a {"type":"tool"}
// object becomes an OpenAI function choice, and everything else is an error.
func TestConvertToolChoice_String(t *testing.T) {
	cases := []struct {
		name    string
		input   interface{}
		wantErr bool
		check   func(interface{})
	}{
		{"auto字符串", "auto", false, func(r interface{}) { assert.Equal(t, "auto", r) }},
		{"any字符串", "any", false, func(r interface{}) { assert.Equal(t, "auto", r) }},
		{"无效字符串", "invalid", true, nil},
		{
			"tool对象",
			map[string]interface{}{"type": "tool", "name": "my_func"},
			false,
			func(r interface{}) {
				m := r.(map[string]interface{})
				assert.Equal(t, "function", m["type"])
			},
		},
		{"缺少name的tool对象", map[string]interface{}{"type": "tool"}, true, nil},
		{"缺少type的对象", map[string]interface{}{"name": "func"}, true, nil},
		{"无效类型", 42, true, nil},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := convertToolChoice(tc.input)
			if tc.wantErr {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err)
			tc.check(got)
		})
	}
}
func TestValidateRequest(t *testing.T) {
t.Run("有效请求", func(t *testing.T) {
req := &MessagesRequest{
Model: "claude-3-opus",
MaxTokens: 100,
Messages: []AnthropicMessage{
{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
},
}
errs := ValidateRequest(req)
assert.Nil(t, errs)
})
t.Run("缺少模型", func(t *testing.T) {
req := &MessagesRequest{
MaxTokens: 100,
Messages: []AnthropicMessage{
{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
},
}
errs := ValidateRequest(req)
assert.NotNil(t, errs)
assert.Contains(t, errs["model"], "不能为空")
})
t.Run("MaxTokens为0", func(t *testing.T) {
req := &MessagesRequest{
Model: "claude-3-opus",
MaxTokens: 0,
Messages: []AnthropicMessage{
{Role: "user", Content: []ContentBlock{{Type: "text", Text: "Hi"}}},
},
}
errs := ValidateRequest(req)
assert.NotNil(t, errs)
})
}

View File

@@ -0,0 +1,229 @@
package anthropic
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/protocol/openai"
)
// TestStreamConverter_MessageStart verifies that the very first chunk fed
// to a fresh converter emits a message_start event seeded with the id,
// model and assistant role given to NewStreamConverter.
func TestStreamConverter_MessageStart(t *testing.T) {
	sc := NewStreamConverter("msg_123", "claude-3-opus")

	events, err := sc.ConvertChunk(&openai.StreamChunk{
		ID:      "chatcmpl-123",
		Choices: []openai.StreamChoice{{Index: 0, Delta: openai.Delta{}}},
	})
	require.NoError(t, err)
	require.NotEmpty(t, events)

	// message_start must come first.
	first := events[0]
	assert.Equal(t, "message_start", first.Type)
	require.NotNil(t, first.Message)
	assert.Equal(t, "msg_123", first.Message.ID)
	assert.Equal(t, "message", first.Message.Type)
	assert.Equal(t, "assistant", first.Message.Role)
	assert.Equal(t, "claude-3-opus", first.Message.Model)
}
// TestStreamConverter_TextDelta verifies the converter's bookkeeping for
// text content: the first text chunk also opens the message and the content
// block, while subsequent text chunks emit only a content_block_delta.
func TestStreamConverter_TextDelta(t *testing.T) {
	sc := NewStreamConverter("msg_123", "claude-3-opus")

	// First text chunk: expect message_start + content_block_start + delta.
	first, err := sc.ConvertChunk(&openai.StreamChunk{
		Choices: []openai.StreamChoice{{Delta: openai.Delta{Content: "Hello"}}},
	})
	require.NoError(t, err)
	assert.GreaterOrEqual(t, len(first), 3)

	// Second text chunk: the message and block are already open, so only
	// the text delta itself should be emitted.
	second, err := sc.ConvertChunk(&openai.StreamChunk{
		Choices: []openai.StreamChoice{{Delta: openai.Delta{Content: " world"}}},
	})
	require.NoError(t, err)
	assert.Len(t, second, 1)
	assert.Equal(t, "content_block_delta", second[0].Type)
	assert.Equal(t, "text_delta", second[0].Delta.Type)
	assert.Equal(t, " world", second[0].Delta.Text)
}
// TestStreamConverter_FinishReason verifies that an OpenAI finish_reason of
// "stop" yields a message_delta event with stop_reason "end_turn" followed
// by a terminating message_stop event.
func TestStreamConverter_FinishReason(t *testing.T) {
	converter := NewStreamConverter("msg_123", "claude-3-opus")
	chunk := &openai.StreamChunk{
		Choices: []openai.StreamChoice{
			{Delta: openai.Delta{Content: "Hello"}, FinishReason: "stop"},
		},
	}
	events, err := converter.ConvertChunk(chunk)
	require.NoError(t, err)

	// find returns the first event of the given type, or nil.
	// Fix: the original duplicated this search loop twice and took the
	// address of the range variable (`&e`) — a pre-Go 1.22 aliasing
	// footgun. Indexing into the slice is unambiguous.
	find := func(eventType string) *StreamEvent {
		for i := range events {
			if events[i].Type == eventType {
				return &events[i]
			}
		}
		return nil
	}

	messageDelta := find("message_delta")
	require.NotNil(t, messageDelta)
	assert.Equal(t, "end_turn", messageDelta.Delta.StopReason)

	assert.NotNil(t, find("message_stop"))
}
// TestStreamConverter_FinishReasonToolCalls verifies the finish-reason
// mapping "tool_calls" → "tool_use" in the emitted message_delta event.
func TestStreamConverter_FinishReasonToolCalls(t *testing.T) {
	sc := NewStreamConverter("msg_123", "claude-3-opus")

	events, err := sc.ConvertChunk(&openai.StreamChunk{
		Choices: []openai.StreamChoice{
			{Delta: openai.Delta{}, FinishReason: "tool_calls"},
		},
	})
	require.NoError(t, err)

	// Locate the message_delta event by index (no range-variable aliasing).
	var delta *StreamEvent
	for i := range events {
		if events[i].Type == "message_delta" {
			delta = &events[i]
			break
		}
	}
	require.NotNil(t, delta)
	assert.Equal(t, "tool_use", delta.Delta.StopReason)
}
// TestStreamConverter_FinishReasonLength verifies the finish-reason
// mapping "length" → "max_tokens" in the emitted message_delta event.
func TestStreamConverter_FinishReasonLength(t *testing.T) {
	sc := NewStreamConverter("msg_123", "claude-3-opus")

	events, err := sc.ConvertChunk(&openai.StreamChunk{
		Choices: []openai.StreamChoice{
			{Delta: openai.Delta{}, FinishReason: "length"},
		},
	})
	require.NoError(t, err)

	// Locate the message_delta event by index (no range-variable aliasing).
	var delta *StreamEvent
	for i := range events {
		if events[i].Type == "message_delta" {
			delta = &events[i]
			break
		}
	}
	require.NotNil(t, delta)
	assert.Equal(t, "max_tokens", delta.Delta.StopReason)
}
// TestStreamConverter_ToolCalls verifies that a streamed tool call produces
// a tool_use content_block_start (carrying id and name) plus an
// input_json_delta content_block_delta (carrying the raw arguments).
func TestStreamConverter_ToolCalls(t *testing.T) {
	sc := NewStreamConverter("msg_123", "claude-3-opus")

	events, err := sc.ConvertChunk(&openai.StreamChunk{
		Choices: []openai.StreamChoice{{
			Delta: openai.Delta{
				ToolCalls: []openai.ToolCall{{
					ID:   "call_123",
					Type: "function",
					Function: openai.FunctionCall{
						Name:      "get_weather",
						Arguments: `{"city": "Beijing"}`,
					},
				}},
			},
		}},
	})
	require.NoError(t, err)

	var sawBlockStart, sawInputDelta bool
	for _, ev := range events {
		switch {
		case ev.Type == "content_block_start" && ev.ContentBlock != nil && ev.ContentBlock.Type == "tool_use":
			sawBlockStart = true
			assert.Equal(t, "call_123", ev.ContentBlock.ID)
			assert.Equal(t, "get_weather", ev.ContentBlock.Name)
		case ev.Type == "content_block_delta" && ev.Delta != nil && ev.Delta.Type == "input_json_delta":
			sawInputDelta = true
			assert.Equal(t, `{"city": "Beijing"}`, ev.Delta.Input)
		}
	}
	assert.True(t, sawBlockStart, "应有 tool_use content_block_start")
	assert.True(t, sawInputDelta, "应有 input_json_delta")
}
// TestSerializeEvent verifies SSE framing: SerializeEvent must produce an
// "event:" line with the event type and a "data:" line containing the
// JSON-encoded payload.
func TestSerializeEvent(t *testing.T) {
	ev := StreamEvent{
		Type: "message_start",
		Message: &MessagesResponse{
			ID:   "msg_123",
			Type: "message",
			Role: "assistant",
		},
	}

	out, err := SerializeEvent(ev)
	require.NoError(t, err)

	for _, want := range []string{"event: message_start", "data: ", "msg_123"} {
		assert.Contains(t, out, want)
	}
}
// TestSerializeEvent_InvalidJSON serializes a minimal event with only a
// type and no payload.
// NOTE(review): despite the name, nothing here is invalid — a bare
// StreamEvent marshals cleanly; consider renaming (e.g. _Minimal) or
// feeding a genuinely unmarshalable payload to earn the name.
func TestSerializeEvent_InvalidJSON(t *testing.T) {
	event := StreamEvent{
		Type: "test",
	}
	// This should serialize without error.
	result, err := SerializeEvent(event)
	require.NoError(t, err)
	assert.Contains(t, result, "event: test")
}
// TestContentBlock_ParseInputJSON covers ParseInputJSON's three input
// shapes: a JSON string (decoded), an already-parsed map (returned as-is),
// and anything else (error).
func TestContentBlock_ParseInputJSON(t *testing.T) {
	t.Run("字符串输入", func(t *testing.T) {
		block := &ContentBlock{Input: `{"key": "value"}`}
		parsed, err := block.ParseInputJSON()
		require.NoError(t, err)
		assert.Equal(t, "value", parsed["key"])
	})

	t.Run("对象输入", func(t *testing.T) {
		block := &ContentBlock{Input: map[string]interface{}{"key": "value"}}
		parsed, err := block.ParseInputJSON()
		require.NoError(t, err)
		assert.Equal(t, "value", parsed["key"])
	})

	t.Run("无效类型", func(t *testing.T) {
		block := &ContentBlock{Input: 42}
		if _, err := block.ParseInputJSON(); err == nil {
			t.Fatal("expected an error for non-string, non-map input")
		}
	})
}

View File

@@ -1,13 +1,20 @@
package anthropic
import "encoding/json"
import (
"encoding/json"
"fmt"
"github.com/go-playground/validator/v10"
pkgValidator "nex/backend/pkg/validator"
)
// MessagesRequest Anthropic Messages API 请求结构
type MessagesRequest struct {
Model string `json:"model"`
Messages []AnthropicMessage `json:"messages"`
Model string `json:"model" validate:"required"`
Messages []AnthropicMessage `json:"messages" validate:"required,min=1"`
System string `json:"system,omitempty"`
MaxTokens int `json:"max_tokens"`
MaxTokens int `json:"max_tokens" validate:"required,min=1"`
Temperature *float64 `json:"temperature,omitempty"`
TopP *float64 `json:"top_p,omitempty"`
TopK *int `json:"top_k,omitempty"`
@@ -114,5 +121,29 @@ func (cb *ContentBlock) ParseInputJSON() (map[string]interface{}, error) {
if obj, ok := cb.Input.(map[string]interface{}); ok {
return obj, nil
}
return nil, json.Unmarshal([]byte{}, nil) // 返回错误
return nil, fmt.Errorf("invalid input type: expected string or map")
}
// ValidateRequest validates a MessagesRequest and returns a map of
// JSON field name → human-readable (Chinese) message, or nil when the
// request is valid.
func ValidateRequest(req *MessagesRequest) map[string]string {
	errs := pkgValidator.Validate(req)
	if errs == nil {
		return nil
	}
	validationErrors := make(map[string]string)
	// Fix: the original used a bare type assertion, which panics if the
	// validator returns anything other than ValidationErrors (e.g. an
	// *InvalidValidationError for a nil/non-struct input). Use the
	// comma-ok form and degrade to a generic message instead.
	fieldErrs, ok := errs.(validator.ValidationErrors)
	if !ok {
		validationErrors["request"] = errs.Error()
		return validationErrors
	}
	for _, err := range fieldErrs {
		field := err.Field()
		switch field {
		case "Model":
			validationErrors["model"] = "模型名称不能为空"
		case "Messages":
			validationErrors["messages"] = "消息列表不能为空"
		case "MaxTokens":
			validationErrors["max_tokens"] = "max_tokens 不能为空且必须大于 0"
		default:
			validationErrors[field] = fmt.Sprintf("字段 %s 验证失败: %s", field, err.Tag())
		}
	}
	return validationErrors
}