1
0
Files
nex/backend/internal/conversion/openai/encoder_test.go
lanyuanxiaoyao 1dac347d3b refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包
引入 Canonical Model 和 ProtocolAdapter 架构,支持 OpenAI/Anthropic 协议间
无缝转换,统一 ProxyHandler 替代分散的 OpenAI/Anthropic Handler,简化
ProviderClient 为协议无关的 HTTP 发送器,Provider 新增 protocol 字段。
2026-04-20 00:36:27 +08:00

356 lines
11 KiB
Go

package openai
import (
"encoding/json"
"testing"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestEncodeRequest_Basic verifies that a minimal canonical request is
// encoded with the target provider's model name, the stream flag, and a
// single message entry.
func TestEncodeRequest_Basic(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model: "gpt-4",
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
		Stream: true,
	}
	target := conversion.NewTargetProvider("", "key", "my-model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, "my-model", decoded["model"])
	assert.Equal(t, true, decoded["stream"])
	msgs, ok := decoded["messages"].([]any)
	require.True(t, ok)
	assert.Len(t, msgs, 1)
}
// TestEncodeRequest_SystemInjection verifies that a canonical system prompt
// is injected as a leading "system" message ahead of the user message.
func TestEncodeRequest_SystemInjection(t *testing.T) {
	req := &canonical.CanonicalRequest{
		Model:    "gpt-4",
		System:   "你是助手",
		Messages: []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
	}
	provider := conversion.NewTargetProvider("", "key", "model")
	body, err := encodeRequest(req, provider)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Checked type assertions fail the test with a clear message instead of
	// panicking on an unexpected payload shape (matches the style used in
	// the other tests in this file).
	msgs, ok := result["messages"].([]any)
	require.True(t, ok)
	require.Len(t, msgs, 2)
	firstMsg, ok := msgs[0].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "system", firstMsg["role"])
	assert.Equal(t, "你是助手", firstMsg["content"])
}
// TestEncodeRequest_ToolCalls verifies that an assistant tool-use content
// block is encoded as an OpenAI "tool_calls" entry carrying the call ID.
func TestEncodeRequest_ToolCalls(t *testing.T) {
	input := json.RawMessage(`{"city":"北京"}`)
	req := &canonical.CanonicalRequest{
		Model: "gpt-4",
		Messages: []canonical.CanonicalMessage{
			{
				Role: canonical.RoleAssistant,
				Content: []canonical.ContentBlock{
					canonical.NewToolUseBlock("call_1", "get_weather", input),
				},
			},
		},
	}
	provider := conversion.NewTargetProvider("", "key", "model")
	body, err := encodeRequest(req, provider)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Navigate the decoded payload with checked assertions so a shape
	// mismatch produces a readable failure rather than a panic.
	msgs, ok := result["messages"].([]any)
	require.True(t, ok)
	require.NotEmpty(t, msgs)
	assistantMsg, ok := msgs[0].(map[string]any)
	require.True(t, ok)
	toolCalls, ok := assistantMsg["tool_calls"].([]any)
	require.True(t, ok)
	require.Len(t, toolCalls, 1)
	tc, ok := toolCalls[0].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "call_1", tc["id"])
}
// TestEncodeRequest_Thinking verifies that an enabled thinking configuration
// is mapped to the OpenAI "reasoning_effort" field.
func TestEncodeRequest_Thinking(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:    "gpt-4",
		Thinking: &canonical.ThinkingConfig{Type: "enabled", Effort: "high"},
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, "high", decoded["reasoning_effort"])
}
// TestEncodeResponse_Basic verifies that a canonical text response becomes a
// chat.completion object exposing the text content and a "stop" finish reason.
func TestEncodeResponse_Basic(t *testing.T) {
	sr := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:         "resp-1",
		Model:      "gpt-4",
		Content:    []canonical.ContentBlock{canonical.NewTextBlock("你好")},
		StopReason: &sr,
		Usage:      canonical.CanonicalUsage{InputTokens: 10, OutputTokens: 5},
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "resp-1", result["id"])
	assert.Equal(t, "chat.completion", result["object"])
	// Checked type assertions avoid opaque panics on unexpected shapes.
	choices, ok := result["choices"].([]any)
	require.True(t, ok)
	require.NotEmpty(t, choices)
	choice, ok := choices[0].(map[string]any)
	require.True(t, ok)
	msg, ok := choice["message"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "你好", msg["content"])
	assert.Equal(t, "stop", choice["finish_reason"])
}
// TestEncodeResponse_ToolUse verifies that a tool-use content block in the
// canonical response is encoded as a message-level "tool_calls" array.
func TestEncodeResponse_ToolUse(t *testing.T) {
	sr := canonical.StopReasonToolUse
	input := json.RawMessage(`{"q":"test"}`)
	resp := &canonical.CanonicalResponse{
		ID:         "resp-2",
		Model:      "gpt-4",
		Content:    []canonical.ContentBlock{canonical.NewToolUseBlock("call_1", "search", input)},
		StopReason: &sr,
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Checked type assertions keep failures readable instead of panicking.
	choices, ok := result["choices"].([]any)
	require.True(t, ok)
	require.NotEmpty(t, choices)
	choice, ok := choices[0].(map[string]any)
	require.True(t, ok)
	msg, ok := choice["message"].(map[string]any)
	require.True(t, ok)
	tcs, ok := msg["tool_calls"].([]any)
	require.True(t, ok)
	assert.Len(t, tcs, 1)
}
// TestEncodeModelsResponse verifies that a canonical model list encodes to
// the OpenAI list envelope with one data entry per model.
func TestEncodeModelsResponse(t *testing.T) {
	list := &canonical.CanonicalModelList{
		Models: []canonical.CanonicalModel{
			{ID: "gpt-4", Created: 1700000000, OwnedBy: "openai"},
			{ID: "gpt-3.5-turbo", Created: 1700000001, OwnedBy: "openai"},
		},
	}
	body, err := encodeModelsResponse(list)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "list", result["object"])
	// Checked type assertion: fail cleanly if "data" is missing or mistyped.
	data, ok := result["data"].([]any)
	require.True(t, ok)
	assert.Len(t, data, 2)
}
// TestMergeConsecutiveRoles verifies that runs of messages sharing a role are
// collapsed into one message with their contents concatenated in order.
func TestMergeConsecutiveRoles(t *testing.T) {
	input := []map[string]any{
		{"role": "user", "content": "A"},
		{"role": "user", "content": "B"},
		{"role": "assistant", "content": "C"},
		{"role": "assistant", "content": "D"},
	}

	merged := mergeConsecutiveRoles(input)

	assert.Len(t, merged, 2)
	assert.Equal(t, "AB", merged[0]["content"])
	assert.Equal(t, "CD", merged[1]["content"])
}
// TestMergeConsecutiveRoles_NotOverwriting verifies that merging appends the
// later content to the earlier one rather than replacing it.
func TestMergeConsecutiveRoles_NotOverwriting(t *testing.T) {
	input := []map[string]any{
		{"role": "user", "content": "你好"},
		{"role": "user", "content": "世界"},
	}

	merged := mergeConsecutiveRoles(input)

	assert.Len(t, merged, 1)
	assert.Equal(t, "你好世界", merged[0]["content"])
}
// TestEncodeRequest_ToolChoice_Auto verifies that the canonical "auto" tool
// choice encodes to the plain string "auto".
func TestEncodeRequest_ToolChoice_Auto(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:      "gpt-4",
		ToolChoice: canonical.NewToolChoiceAuto(),
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, "auto", decoded["tool_choice"])
}
// TestEncodeRequest_ToolChoice_None verifies that the canonical "none" tool
// choice encodes to the plain string "none".
func TestEncodeRequest_ToolChoice_None(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:      "gpt-4",
		ToolChoice: canonical.NewToolChoiceNone(),
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, "none", decoded["tool_choice"])
}
// TestEncodeRequest_ToolChoice_Required verifies that the canonical "any"
// tool choice is translated to OpenAI's "required" keyword.
func TestEncodeRequest_ToolChoice_Required(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:      "gpt-4",
		ToolChoice: canonical.NewToolChoiceAny(),
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, "required", decoded["tool_choice"])
}
// TestEncodeRequest_ToolChoice_Named verifies that a named tool choice is
// encoded as {"type":"function","function":{"name":...}}.
func TestEncodeRequest_ToolChoice_Named(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:      "gpt-4",
		ToolChoice: canonical.NewToolChoiceNamed("my_func"),
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))

	choice, ok := decoded["tool_choice"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "function", choice["type"])
	fn, ok := choice["function"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "my_func", fn["name"])
}
// TestEncodeRequest_OutputFormat_JSONSchema verifies that a json_schema
// output format is encoded as an OpenAI response_format object carrying the
// schema name and schema body.
func TestEncodeRequest_OutputFormat_JSONSchema(t *testing.T) {
	schema := json.RawMessage(`{"type":"object","properties":{"name":{"type":"string"}}}`)
	canonicalReq := &canonical.CanonicalRequest{
		Model: "gpt-4",
		OutputFormat: &canonical.OutputFormat{
			Type:   "json_schema",
			Name:   "my_schema",
			Schema: schema,
		},
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))

	rf, ok := decoded["response_format"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "json_schema", rf["type"])
	js, ok := rf["json_schema"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "my_schema", js["name"])
	assert.NotNil(t, js["schema"])
}
// TestEncodeRequest_OutputFormat_Text verifies that no "response_format" key
// is emitted when the canonical request specifies no output format.
func TestEncodeRequest_OutputFormat_Text(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model: "gpt-4",
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	_, present := decoded["response_format"]
	assert.False(t, present)
}
// TestEncodeResponse_Thinking verifies that a thinking content block is
// surfaced as "reasoning_content" alongside the regular text content.
func TestEncodeResponse_Thinking(t *testing.T) {
	sr := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:    "resp-thinking",
		Model: "gpt-4",
		Content: []canonical.ContentBlock{
			canonical.NewTextBlock("回答"),
			canonical.NewThinkingBlock("思考过程"),
		},
		StopReason: &sr,
		Usage:      canonical.CanonicalUsage{InputTokens: 10, OutputTokens: 5},
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Checked type assertions give a readable failure instead of a panic.
	choices, ok := result["choices"].([]any)
	require.True(t, ok)
	require.NotEmpty(t, choices)
	choice, ok := choices[0].(map[string]any)
	require.True(t, ok)
	msg, ok := choice["message"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "回答", msg["content"])
	assert.Equal(t, "思考过程", msg["reasoning_content"])
}
// TestEncodeRequest_Parameters verifies that sampling parameters and stop
// sequences are mapped onto their OpenAI field names, with MaxTokens going
// to "max_completion_tokens".
func TestEncodeRequest_Parameters(t *testing.T) {
	var (
		temperature = 0.5
		maxTokens   = 2048
		topP        = 0.9
	)
	canonicalReq := &canonical.CanonicalRequest{
		Model: "gpt-4",
		Messages: []canonical.CanonicalMessage{
			{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}},
		},
		Parameters: canonical.RequestParameters{
			Temperature:   &temperature,
			MaxTokens:     &maxTokens,
			TopP:          &topP,
			StopSequences: []string{"STOP", "END"},
		},
	}
	target := conversion.NewTargetProvider("", "key", "model")

	payload, err := encodeRequest(canonicalReq, target)
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(payload, &decoded))
	assert.Equal(t, temperature, decoded["temperature"])
	assert.Equal(t, float64(maxTokens), decoded["max_completion_tokens"])
	assert.Equal(t, topP, decoded["top_p"])
	stop, ok := decoded["stop"].([]any)
	require.True(t, ok)
	assert.Len(t, stop, 2)
	assert.Equal(t, "STOP", stop[0])
	assert.Equal(t, "END", stop[1])
}