1
0
Files
nex/backend/internal/conversion/openai/supplemental_test.go
lanyuanxiaoyao bc1ee612d9 refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包
- 新增 ConversionEngine 核心引擎,支持 OpenAI 和 Anthropic 协议转换
- 添加 stream decoder/encoder 实现
- 更新 provider client 支持新引擎
- 补充单元测试和集成测试
- 更新 specs 文档
2026-04-20 13:02:28 +08:00

435 lines
14 KiB
Go

package openai
import (
"encoding/json"
"testing"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDecodeEmbeddingRequest verifies that a fully-populated embedding
// request body decodes with every scalar field intact.
func TestDecodeEmbeddingRequest(t *testing.T) {
	payload := []byte(`{"model":"text-embedding-3-small","input":"hello world","encoding_format":"float","dimensions":256}`)
	decoded, err := decodeEmbeddingRequest(payload)
	require.NoError(t, err)
	assert.Equal(t, "text-embedding-3-small", decoded.Model)
	assert.Equal(t, "hello world", decoded.Input)
	assert.Equal(t, "float", decoded.EncodingFormat)
	require.NotNil(t, decoded.Dimensions)
	assert.Equal(t, 256, *decoded.Dimensions)
}
// TestDecodeEmbeddingRequest_ArrayInput verifies that an array-valued
// "input" field survives decoding as a []any with its elements intact.
func TestDecodeEmbeddingRequest_ArrayInput(t *testing.T) {
	body := []byte(`{"model":"text-embedding","input":["hello","world"]}`)
	req, err := decodeEmbeddingRequest(body)
	require.NoError(t, err)
	assert.Equal(t, "text-embedding", req.Model)
	inputArr, ok := req.Input.([]any)
	require.True(t, ok)
	// Assert the element values, not just the length, so a decoder that
	// drops or reorders entries is caught.
	assert.Equal(t, []any{"hello", "world"}, inputArr)
}
// TestDecodeEmbeddingRequest_InvalidJSON verifies that a body which is not
// valid JSON is rejected with an error.
func TestDecodeEmbeddingRequest_InvalidJSON(t *testing.T) {
	raw := []byte(`invalid`)
	_, err := decodeEmbeddingRequest(raw)
	assert.Error(t, err)
}
// TestDecodeEmbeddingResponse verifies that a provider embedding response is
// decoded with its data rows, vector values, model name, and usage intact.
func TestDecodeEmbeddingResponse(t *testing.T) {
	body := []byte(`{
		"object": "list",
		"data": [{"index": 0, "embedding": [0.1, 0.2, 0.3]}],
		"model": "text-embedding-3-small",
		"usage": {"prompt_tokens": 5, "total_tokens": 5}
	}`)
	resp, err := decodeEmbeddingResponse(body)
	require.NoError(t, err)
	assert.Equal(t, "text-embedding-3-small", resp.Model)
	// Fatal length check: indexing Data[0] after a non-fatal assert would
	// panic when the slice is empty instead of failing cleanly.
	require.Len(t, resp.Data, 1)
	assert.Equal(t, 0, resp.Data[0].Index)
	// Check the vector itself, not just the row count, so a decoder that
	// drops embedding values is caught.
	assert.Equal(t, []float64{0.1, 0.2, 0.3}, resp.Data[0].Embedding)
	assert.Equal(t, 5, resp.Usage.PromptTokens)
	assert.Equal(t, 5, resp.Usage.TotalTokens)
}
// TestDecodeRerankRequest verifies decoding of a rerank request including
// the optional top_n and return_documents fields.
func TestDecodeRerankRequest(t *testing.T) {
	payload := []byte(`{"model":"rerank-1","query":"what is AI","documents":["doc1","doc2"],"top_n":3,"return_documents":true}`)
	decoded, err := decodeRerankRequest(payload)
	require.NoError(t, err)
	assert.Equal(t, "rerank-1", decoded.Model)
	assert.Equal(t, "what is AI", decoded.Query)
	assert.Equal(t, []string{"doc1", "doc2"}, decoded.Documents)
	require.NotNil(t, decoded.TopN)
	assert.Equal(t, 3, *decoded.TopN)
	require.NotNil(t, decoded.ReturnDocuments)
	assert.True(t, *decoded.ReturnDocuments)
}
// TestDecodeRerankResponse verifies decoding of a rerank response whose
// result carries an attached document.
func TestDecodeRerankResponse(t *testing.T) {
	payload := []byte(`{
		"results": [{"index": 0, "relevance_score": 0.95, "document": "relevant doc"}],
		"model": "rerank-1"
	}`)
	decoded, err := decodeRerankResponse(payload)
	require.NoError(t, err)
	assert.Equal(t, "rerank-1", decoded.Model)
	assert.Len(t, decoded.Results, 1)
	first := decoded.Results[0]
	assert.Equal(t, 0, first.Index)
	assert.InDelta(t, 0.95, first.RelevanceScore, 0.001)
	require.NotNil(t, first.Document)
	assert.Equal(t, "relevant doc", *first.Document)
}
// TestDecodeModelInfoResponse verifies decoding of a single OpenAI model
// object into canonical model info.
func TestDecodeModelInfoResponse(t *testing.T) {
	raw := []byte(`{"id":"gpt-4","object":"model","created":1700000000,"owned_by":"openai"}`)
	got, err := decodeModelInfoResponse(raw)
	require.NoError(t, err)
	assert.Equal(t, "gpt-4", got.ID)
	assert.Equal(t, int64(1700000000), got.Created)
	assert.Equal(t, "openai", got.OwnedBy)
}
// TestEncodeEmbeddingRequest verifies that a canonical embedding request is
// encoded to the OpenAI wire format with the provider's model substituted.
func TestEncodeEmbeddingRequest(t *testing.T) {
	canonicalReq := &canonical.CanonicalEmbeddingRequest{
		Model:          "text-embedding-3-small",
		Input:          "hello",
		EncodingFormat: "float",
	}
	target := conversion.NewTargetProvider("", "key", "my-embedding-model")
	encoded, err := encodeEmbeddingRequest(canonicalReq, target)
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	// The provider's configured model name must replace the incoming one.
	assert.Equal(t, "my-embedding-model", payload["model"])
	assert.Equal(t, "hello", payload["input"])
	assert.Equal(t, "float", payload["encoding_format"])
}
// TestEncodeEmbeddingRequest_WithDimensions verifies that the optional
// dimensions field is carried through to the encoded request.
func TestEncodeEmbeddingRequest_WithDimensions(t *testing.T) {
	dimensions := 256
	canonicalReq := &canonical.CanonicalEmbeddingRequest{
		Model:      "text-embedding",
		Input:      "test",
		Dimensions: &dimensions,
	}
	encoded, err := encodeEmbeddingRequest(canonicalReq, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	// json.Unmarshal into map[string]any yields float64 for JSON numbers.
	assert.Equal(t, float64(256), payload["dimensions"])
}
// TestEncodeEmbeddingResponse verifies that a canonical embedding response is
// encoded to the OpenAI "list" format with data rows and usage preserved.
func TestEncodeEmbeddingResponse(t *testing.T) {
	resp := &canonical.CanonicalEmbeddingResponse{
		Data:  []canonical.EmbeddingData{{Index: 0, Embedding: []float64{0.1, 0.2}}},
		Model: "text-embedding",
		Usage: canonical.EmbeddingUsage{PromptTokens: 3, TotalTokens: 3},
	}
	body, err := encodeEmbeddingResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "list", result["object"])
	assert.Equal(t, "text-embedding", result["model"])
	// The setup populates Data and Usage; verify they survive encoding
	// rather than checking only object/model.
	data, ok := result["data"].([]any)
	require.True(t, ok)
	assert.Len(t, data, 1)
	usage, ok := result["usage"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(3), usage["prompt_tokens"])
	assert.Equal(t, float64(3), usage["total_tokens"])
}
// TestEncodeRerankRequest verifies encoding of a canonical rerank request,
// including every field the setup populates (documents and top_n).
func TestEncodeRerankRequest(t *testing.T) {
	topN := 5
	req := &canonical.CanonicalRerankRequest{
		Model:     "rerank-1",
		Query:     "what is AI",
		Documents: []string{"doc1", "doc2"},
		TopN:      &topN,
	}
	provider := conversion.NewTargetProvider("", "key", "my-rerank-model")
	body, err := encodeRerankRequest(req, provider)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "my-rerank-model", result["model"])
	assert.Equal(t, "what is AI", result["query"])
	// The original test set Documents and TopN but never asserted them.
	assert.Equal(t, []any{"doc1", "doc2"}, result["documents"])
	assert.Equal(t, float64(5), result["top_n"])
}
// TestEncodeRerankResponse verifies encoding of a canonical rerank response
// whose result carries an attached document.
func TestEncodeRerankResponse(t *testing.T) {
	doc := "relevant passage"
	resp := &canonical.CanonicalRerankResponse{
		Results: []canonical.RerankResult{
			{Index: 0, RelevanceScore: 0.95, Document: &doc},
		},
		Model: "rerank-1",
	}
	body, err := encodeRerankResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "rerank-1", result["model"])
	// Checked type assertion so a wrong shape fails the test instead of
	// panicking (consistent with the other encode tests in this file).
	results, ok := result["results"].([]any)
	require.True(t, ok)
	assert.Len(t, results, 1)
}
// TestEncodeModelInfoResponse verifies that canonical model info is encoded
// as an OpenAI model object with the fixed "model" object tag.
func TestEncodeModelInfoResponse(t *testing.T) {
	modelInfo := &canonical.CanonicalModelInfo{
		ID:      "gpt-4",
		Name:    "GPT-4",
		Created: 1700000000,
		OwnedBy: "openai",
	}
	encoded, err := encodeModelInfoResponse(modelInfo)
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	assert.Equal(t, "gpt-4", payload["id"])
	assert.Equal(t, "model", payload["object"])
}
// TestDecodeEmbeddingResponse_InvalidJSON verifies that a malformed response
// body yields an error.
func TestDecodeEmbeddingResponse_InvalidJSON(t *testing.T) {
	raw := []byte(`invalid`)
	_, err := decodeEmbeddingResponse(raw)
	assert.Error(t, err)
}
// TestDecodeRerankRequest_InvalidJSON verifies that a malformed request body
// yields an error.
func TestDecodeRerankRequest_InvalidJSON(t *testing.T) {
	raw := []byte(`invalid`)
	_, err := decodeRerankRequest(raw)
	assert.Error(t, err)
}
// TestDecodeRerankResponse_InvalidJSON verifies that a malformed response
// body yields an error.
func TestDecodeRerankResponse_InvalidJSON(t *testing.T) {
	raw := []byte(`invalid`)
	_, err := decodeRerankResponse(raw)
	assert.Error(t, err)
}
// TestDecodeModelInfoResponse_InvalidJSON verifies that a malformed model
// object yields an error.
func TestDecodeModelInfoResponse_InvalidJSON(t *testing.T) {
	raw := []byte(`invalid`)
	_, err := decodeModelInfoResponse(raw)
	assert.Error(t, err)
}
// TestDecodeRequest_ThinkingNone verifies that reasoning_effort "none" maps
// to a disabled canonical thinking config.
func TestDecodeRequest_ThinkingNone(t *testing.T) {
	raw := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"none"}`)
	decoded, err := decodeRequest(raw)
	require.NoError(t, err)
	require.NotNil(t, decoded.Thinking)
	assert.Equal(t, "disabled", decoded.Thinking.Type)
}
// TestDecodeRequest_ThinkingMinimal verifies that reasoning_effort "minimal"
// maps to an enabled thinking config with low effort.
func TestDecodeRequest_ThinkingMinimal(t *testing.T) {
	raw := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"minimal"}`)
	decoded, err := decodeRequest(raw)
	require.NoError(t, err)
	require.NotNil(t, decoded.Thinking)
	assert.Equal(t, "enabled", decoded.Thinking.Type)
	assert.Equal(t, "low", decoded.Thinking.Effort)
}
// TestDecodeRequest_OutputFormat_Text verifies that response_format "text"
// (the protocol default) produces no canonical output format.
func TestDecodeRequest_OutputFormat_Text(t *testing.T) {
	raw := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"response_format":{"type":"text"}}`)
	decoded, err := decodeRequest(raw)
	require.NoError(t, err)
	assert.Nil(t, decoded.OutputFormat)
}
// TestDecodeRequest_DeprecatedFunctionCall verifies that the deprecated
// function_call/functions fields are mapped onto tool choice and tools.
func TestDecodeRequest_DeprecatedFunctionCall(t *testing.T) {
	body := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"function_call":"auto","functions":[{"name":"fn1","parameters":{}}]}`)
	req, err := decodeRequest(body)
	require.NoError(t, err)
	// Guard before reading .Type so a missing tool choice fails the test
	// cleanly rather than panicking on a nil dereference.
	require.NotNil(t, req.ToolChoice)
	assert.Equal(t, "auto", req.ToolChoice.Type)
	assert.Len(t, req.Tools, 1)
}
// TestDecodeRequest_FunctionMessage verifies that a legacy "function" role
// message is normalized to the canonical tool role.
func TestDecodeRequest_FunctionMessage(t *testing.T) {
	body := []byte(`{
		"model": "gpt-4",
		"messages": [
			{"role": "user", "content": "hi"},
			{"role": "function", "name": "get_weather", "content": "sunny"}
		]
	}`)
	req, err := decodeRequest(body)
	require.NoError(t, err)
	// Fatal length check: indexing Messages[1] after a non-fatal assert
	// would panic instead of failing cleanly when fewer messages decode.
	require.Len(t, req.Messages, 2)
	assert.Equal(t, canonical.RoleTool, req.Messages[1].Role)
}
// TestDecodeRequest_StopString verifies that a scalar "stop" value becomes a
// single-element stop-sequence list.
func TestDecodeRequest_StopString(t *testing.T) {
	raw := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":"END"}`)
	decoded, err := decodeRequest(raw)
	require.NoError(t, err)
	assert.Equal(t, []string{"END"}, decoded.Parameters.StopSequences)
}
// TestDecodeRequest_StopEmptyString verifies that an empty "stop" string is
// dropped rather than kept as an empty stop sequence.
func TestDecodeRequest_StopEmptyString(t *testing.T) {
	raw := []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":""}`)
	decoded, err := decodeRequest(raw)
	require.NoError(t, err)
	assert.Nil(t, decoded.Parameters.StopSequences)
}
// TestDecodeResponse_EmptyChoices verifies that a response with no choices
// still yields one empty text content block.
func TestDecodeResponse_EmptyChoices(t *testing.T) {
	raw := []byte(`{"id":"resp-1","model":"gpt-4","choices":[],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}}`)
	decoded, err := decodeResponse(raw)
	require.NoError(t, err)
	assert.Equal(t, "resp-1", decoded.ID)
	assert.Len(t, decoded.Content, 1)
	assert.Equal(t, "", decoded.Content[0].Text)
}
// TestDecodeResponse_FunctionCallFinishReason verifies that the legacy
// "function_call" finish reason maps to the canonical tool-use stop reason.
func TestDecodeResponse_FunctionCallFinishReason(t *testing.T) {
	body := []byte(`{
		"id":"r1","model":"gpt-4",
		"choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"function_call"}],
		"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}
	}`)
	resp, err := decodeResponse(body)
	require.NoError(t, err)
	// Guard the pointer so a missing stop reason fails the test instead of
	// panicking on the dereference below.
	require.NotNil(t, resp.StopReason)
	assert.Equal(t, canonical.StopReasonToolUse, *resp.StopReason)
}
// TestEncodeRequest_DisabledThinking verifies that a disabled canonical
// thinking config is encoded as reasoning_effort "none".
func TestEncodeRequest_DisabledThinking(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:    "gpt-4",
		Messages: []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		Thinking: &canonical.ThinkingConfig{Type: "disabled"},
	}
	encoded, err := encodeRequest(canonicalReq, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	assert.Equal(t, "none", payload["reasoning_effort"])
}
// TestEncodeRequest_OutputFormat_JSONObject verifies that a json_object
// canonical output format is encoded as an OpenAI response_format object.
func TestEncodeRequest_OutputFormat_JSONObject(t *testing.T) {
	canonicalReq := &canonical.CanonicalRequest{
		Model:        "gpt-4",
		Messages:     []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		OutputFormat: &canonical.OutputFormat{Type: "json_object"},
	}
	encoded, err := encodeRequest(canonicalReq, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	responseFormat, ok := payload["response_format"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "json_object", responseFormat["type"])
}
// TestEncodeRequest_PublicFields verifies that the canonical user ID and
// parallel-tool-use flag are mapped onto their OpenAI field names.
func TestEncodeRequest_PublicFields(t *testing.T) {
	parallelToolUse := true
	canonicalReq := &canonical.CanonicalRequest{
		Model:           "gpt-4",
		Messages:        []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		UserID:          "user-123",
		ParallelToolUse: &parallelToolUse,
	}
	encoded, err := encodeRequest(canonicalReq, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(encoded, &payload))
	assert.Equal(t, "user-123", payload["user"])
	assert.Equal(t, true, payload["parallel_tool_calls"])
}
// TestEncodeResponse_UsageWithCacheAndReasoning verifies that cache-read and
// reasoning token counts are emitted inside the OpenAI usage detail objects.
func TestEncodeResponse_UsageWithCacheAndReasoning(t *testing.T) {
	cache := 80
	reasoning := 20
	sr := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:         "r1",
		Model:      "gpt-4",
		Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
		StopReason: &sr,
		Usage: canonical.CanonicalUsage{
			InputTokens:     100,
			OutputTokens:    50,
			CacheReadTokens: &cache,
			ReasoningTokens: &reasoning,
		},
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Checked assertion: fail the test if "usage" is missing or has the
	// wrong shape instead of panicking (matches the checks below).
	usage, ok := result["usage"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(100), usage["prompt_tokens"])
	ptd, ok := usage["prompt_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(80), ptd["cached_tokens"])
	ctd, ok := usage["completion_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(20), ctd["reasoning_tokens"])
}
// TestEncodeResponse_StopReasons table-tests the canonical→OpenAI
// finish_reason mapping for every defined stop reason.
func TestEncodeResponse_StopReasons(t *testing.T) {
	tests := []struct {
		name       string
		stopReason canonical.StopReason
		want       string
	}{
		{"end_turn→stop", canonical.StopReasonEndTurn, "stop"},
		{"max_tokens→length", canonical.StopReasonMaxTokens, "length"},
		{"tool_use→tool_calls", canonical.StopReasonToolUse, "tool_calls"},
		{"content_filter→content_filter", canonical.StopReasonContentFilter, "content_filter"},
		{"stop_sequence→stop", canonical.StopReasonStopSequence, "stop"},
		{"refusal→stop", canonical.StopReasonRefusal, "stop"},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sr := tt.stopReason
			resp := &canonical.CanonicalResponse{
				ID:         "r1",
				Model:      "gpt-4",
				Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
				StopReason: &sr,
				Usage:      canonical.CanonicalUsage{},
			}
			body, err := encodeResponse(resp)
			require.NoError(t, err)
			var result map[string]any
			require.NoError(t, json.Unmarshal(body, &result))
			// Checked assertions so a malformed response fails this subtest
			// instead of panicking the whole test binary.
			choices, ok := result["choices"].([]any)
			require.True(t, ok)
			require.NotEmpty(t, choices)
			choice, ok := choices[0].(map[string]any)
			require.True(t, ok)
			assert.Equal(t, tt.want, choice["finish_reason"])
		})
	}
}
func TestMapErrorCode_AllCodes(t *testing.T) {
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeInvalidInput))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeMissingRequiredField))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeIncompatibleFeature))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeFieldMappingFailure))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeToolCallParseError))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeJSONParseError))
assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeProtocolConstraint))
assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeStreamStateError))
assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeUTF8DecodeError))
assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeEncodingFailure))
assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeInterfaceNotSupported))
}