1
0

refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包

- 新增 ConversionEngine 核心引擎,支持 OpenAI 和 Anthropic 协议转换
- 添加 stream decoder/encoder 实现
- 更新 provider client 支持新引擎
- 补充单元测试和集成测试
- 更新 specs 文档
This commit is contained in:
2026-04-20 13:01:05 +08:00
parent 1dac347d3b
commit bc1ee612d9
39 changed files with 11177 additions and 995 deletions

View File

@@ -353,3 +353,120 @@ func TestStreamDecoder_MultipleChunks_Text(t *testing.T) {
}
assert.Equal(t, []string{"你好", "世界"}, deltas)
}
// TestStreamDecoder_UTF8Truncation verifies that an SSE frame split mid-way
// (potentially inside a multi-byte UTF-8 sequence) is buffered by the decoder
// and reassembled: the delta text must surface exactly once across the two
// ProcessChunk calls.
func TestStreamDecoder_UTF8Truncation(t *testing.T) {
	d := NewStreamDecoder()
	chunk := map[string]any{
		"id":    "chatcmpl-utf8",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{"content": "你"},
			},
		},
	}
	data, _ := json.Marshal(chunk)
	sseData := []byte("data: " + string(data) + "\n\n")

	// Split the frame 5 bytes before its end so the decoder must buffer the
	// incomplete prefix until the second chunk arrives.
	mid := len(sseData) - 5
	events1 := d.ProcessChunk(sseData[:mid])
	events2 := d.ProcessChunk(sseData[mid:])

	// The original test discarded events2 entirely, so a decoder that lost
	// the buffered prefix would still pass. Assert the reassembled delta is
	// emitted exactly once across both batches.
	var deltas []string
	for _, e := range append(events1, events2...) {
		if e.Type == canonical.EventContentBlockDelta && e.Delta != nil {
			deltas = append(deltas, e.Delta.Text)
		}
	}
	assert.Equal(t, []string{"你"}, deltas)
}
// TestStreamDecoder_ToolCallSubsequentDelta verifies that a tool call split
// across two SSE chunks — the first carrying id/name, the second only an
// arguments fragment — yields an input_json_delta event for the follow-up.
func TestStreamDecoder_ToolCallSubsequentDelta(t *testing.T) {
	d := NewStreamDecoder()
	// First chunk opens the tool call with id, name and empty arguments.
	// NOTE: the original passed a *int for "index"; json.Marshal encodes a
	// non-nil *int identically to the int it points at, so plain literals
	// are used instead of the pointless pointer indirection.
	chunk1 := map[string]any{
		"id":    "chatcmpl-tc",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{
					"tool_calls": []any{
						map[string]any{
							"index": 0,
							"id":    "call_1",
							"type":  "function",
							"function": map[string]any{
								"name":      "get_weather",
								"arguments": "",
							},
						},
					},
				},
			},
		},
	}
	// Second chunk: same tool call index, arguments only (no id/name).
	chunk2 := map[string]any{
		"id":    "chatcmpl-tc",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{
					"tool_calls": []any{
						map[string]any{
							"index": 0,
							"function": map[string]any{
								"arguments": "{\"city\":\"Beijing\"}",
							},
						},
					},
				},
			},
		},
	}
	events1 := d.ProcessChunk(makeChunkSSE(chunk1))
	require.NotEmpty(t, events1)
	events2 := d.ProcessChunk(makeChunkSSE(chunk2))
	require.NotEmpty(t, events2)

	foundInputJSON := false
	for _, e := range events2 {
		if e.Type == canonical.EventContentBlockDelta && e.Delta != nil && e.Delta.Type == "input_json_delta" {
			foundInputJSON = true
			assert.Equal(t, "{\"city\":\"Beijing\"}", e.Delta.PartialJSON)
		}
	}
	assert.True(t, foundInputJSON, "subsequent tool call delta should emit input_json_delta")
}
// TestStreamDecoder_InvalidJSON ensures a malformed data payload is
// discarded without producing any events.
func TestStreamDecoder_InvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	got := dec.ProcessChunk([]byte("data: {invalid json}\n\n"))
	assert.Nil(t, got)
}
// TestStreamDecoder_NonDataLines ensures SSE comment lines (": ...") are
// skipped while the following data line is still decoded.
func TestStreamDecoder_NonDataLines(t *testing.T) {
	dec := NewStreamDecoder()
	input := []byte(": comment line\ndata: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n")
	events := dec.ProcessChunk(input)
	require.NotEmpty(t, events)

	matched := false
	for _, ev := range events {
		if ev.Type != canonical.EventContentBlockDelta || ev.Delta == nil {
			continue
		}
		matched = true
		assert.Equal(t, "hi", ev.Delta.Text)
	}
	assert.True(t, matched)
}

View File

@@ -137,15 +137,10 @@ func (e *StreamEncoder) encodeInputJSONDelta(event canonical.CanonicalStreamEven
}
// 后续 delta仅含 arguments
// 通过 index 查找 tool call
// 使用 canonical 事件中的 index 直接映射到 OpenAI tool_calls index
tcIdx := 0
if event.Index != nil {
for id, idx := range e.toolCallIndexMap {
if idx == tcIdx {
_ = id
break
}
}
tcIdx = *event.Index
}
delta := map[string]any{
"tool_calls": []map[string]any{{

View File

@@ -170,3 +170,116 @@ func TestStreamEncoder_MessageDelta_WithUsage(t *testing.T) {
assert.Contains(t, s, "usage")
assert.Contains(t, s, "prompt_tokens")
}
// TestStreamEncoder_InputJSONDelta_SubsequentDelta checks that an
// argument-only delta following an earlier delta for the same tool call is
// still encoded into a tool_calls chunk containing the new JSON fragment.
func TestStreamEncoder_InputJSONDelta_SubsequentDelta(t *testing.T) {
	enc := NewStreamEncoder()
	enc.EncodeEvent(canonical.NewContentBlockStartEvent(0, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_1",
		Name: "get_weather",
	}))
	enc.EncodeEvent(canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `{"city":`,
	}))
	chunks := enc.EncodeEvent(canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `"Beijing"}`,
	}))
	require.NotEmpty(t, chunks)

	encoded := string(chunks[0])
	assert.Contains(t, encoded, "tool_calls")
	assert.Contains(t, encoded, "Beijing")
}
// TestStreamEncoder_MessageStart_NilMessage: a message_start event without a
// message payload must still produce a valid chunk envelope.
func TestStreamEncoder_MessageStart_NilMessage(t *testing.T) {
	enc := NewStreamEncoder()
	chunks := enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: canonical.EventMessageStart})
	require.Len(t, chunks, 1)
	assert.Contains(t, string(chunks[0]), "chat.completion.chunk")
}

// TestStreamEncoder_UnknownEvent_ReturnsNil: unrecognized event types are
// silently ignored.
func TestStreamEncoder_UnknownEvent_ReturnsNil(t *testing.T) {
	enc := NewStreamEncoder()
	assert.Nil(t, enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: "unknown_type"}))
}

// TestStreamEncoder_ContentBlockDelta_NilDelta: a delta event missing its
// payload encodes to nothing rather than panicking.
func TestStreamEncoder_ContentBlockDelta_NilDelta(t *testing.T) {
	enc := NewStreamEncoder()
	assert.Nil(t, enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: canonical.EventContentBlockDelta}))
}
// TestStreamEncoder_MultiToolCall_IndexMapping drives two interleaved tool
// calls through the encoder and verifies each canonical block index is
// mapped to the matching OpenAI tool_calls index — both on the first delta
// (which carries the function name) and on subsequent argument-only deltas.
func TestStreamEncoder_MultiToolCall_IndexMapping(t *testing.T) {
	enc := NewStreamEncoder()

	// encode runs one event through the encoder and returns the first chunk
	// as a string, requiring that at least one chunk was produced.
	encode := func(ev canonical.CanonicalStreamEvent) string {
		chunks := enc.EncodeEvent(ev)
		require.NotEmpty(t, chunks)
		return string(chunks[0])
	}
	// jsonDelta builds an input_json_delta event for the given block index.
	jsonDelta := func(idx int, partial string) canonical.CanonicalStreamEvent {
		return canonical.NewContentBlockDeltaEvent(idx, canonical.StreamDelta{
			Type:        string(canonical.DeltaTypeInputJSON),
			PartialJSON: partial,
		})
	}

	// Tool call #0: start, then first argument delta (carries the name).
	enc.EncodeEvent(canonical.NewContentBlockStartEvent(0, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_1",
		Name: "get_weather",
	}))
	out := encode(jsonDelta(0, `{"city":"北京"}`))
	assert.Contains(t, out, `"index":0`)
	assert.Contains(t, out, "get_weather")
	assert.Contains(t, out, "北京")

	// Tool call #1 starts after #0's block stops.
	enc.EncodeEvent(canonical.NewContentBlockStopEvent(0))
	enc.EncodeEvent(canonical.NewContentBlockStartEvent(1, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_2",
		Name: "get_time",
	}))
	out = encode(jsonDelta(1, `{"tz":"Asia/Shanghai"}`))
	assert.Contains(t, out, `"index":1`)
	assert.Contains(t, out, "get_time")
	assert.Contains(t, out, "Asia/Shanghai")

	// Subsequent argument-only deltas must keep the right index and must
	// not repeat the function name.
	out = encode(jsonDelta(0, `"more_data"`))
	assert.Contains(t, out, `"index":0`)
	assert.NotContains(t, out, "get_weather")
	assert.Contains(t, out, "more_data")

	out = encode(jsonDelta(1, `"more_time"`))
	assert.Contains(t, out, `"index":1`)
	assert.Contains(t, out, "more_time")
}

View File

@@ -0,0 +1,434 @@
package openai
import (
"encoding/json"
"testing"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDecodeEmbeddingRequest checks that a full embedding request body maps
// every field, including the optional dimensions pointer.
func TestDecodeEmbeddingRequest(t *testing.T) {
	body := []byte(`{"model":"text-embedding-3-small","input":"hello world","encoding_format":"float","dimensions":256}`)
	got, err := decodeEmbeddingRequest(body)
	require.NoError(t, err)
	assert.Equal(t, "text-embedding-3-small", got.Model)
	assert.Equal(t, "hello world", got.Input)
	assert.Equal(t, "float", got.EncodingFormat)
	require.NotNil(t, got.Dimensions)
	assert.Equal(t, 256, *got.Dimensions)
}

// TestDecodeEmbeddingRequest_ArrayInput checks that an array-valued input is
// preserved as []any.
func TestDecodeEmbeddingRequest_ArrayInput(t *testing.T) {
	got, err := decodeEmbeddingRequest([]byte(`{"model":"text-embedding","input":["hello","world"]}`))
	require.NoError(t, err)
	assert.Equal(t, "text-embedding", got.Model)
	arr, ok := got.Input.([]any)
	require.True(t, ok)
	assert.Len(t, arr, 2)
}

// TestDecodeEmbeddingRequest_InvalidJSON checks that malformed JSON surfaces
// an error.
func TestDecodeEmbeddingRequest_InvalidJSON(t *testing.T) {
	_, err := decodeEmbeddingRequest([]byte(`invalid`))
	assert.Error(t, err)
}
// TestDecodeEmbeddingResponse checks field mapping of a provider embedding
// response, including the nested data rows and usage counters.
func TestDecodeEmbeddingResponse(t *testing.T) {
	body := []byte(`{
		"object": "list",
		"data": [{"index": 0, "embedding": [0.1, 0.2, 0.3]}],
		"model": "text-embedding-3-small",
		"usage": {"prompt_tokens": 5, "total_tokens": 5}
	}`)
	got, err := decodeEmbeddingResponse(body)
	require.NoError(t, err)
	assert.Equal(t, "text-embedding-3-small", got.Model)
	require.Len(t, got.Data, 1)
	assert.Equal(t, 0, got.Data[0].Index)
	assert.Equal(t, 5, got.Usage.PromptTokens)
}
// TestDecodeRerankRequest checks full field mapping for a rerank request,
// including the optional top_n and return_documents pointers.
func TestDecodeRerankRequest(t *testing.T) {
	body := []byte(`{"model":"rerank-1","query":"what is AI","documents":["doc1","doc2"],"top_n":3,"return_documents":true}`)
	got, err := decodeRerankRequest(body)
	require.NoError(t, err)
	assert.Equal(t, "rerank-1", got.Model)
	assert.Equal(t, "what is AI", got.Query)
	assert.Equal(t, []string{"doc1", "doc2"}, got.Documents)
	require.NotNil(t, got.TopN)
	assert.Equal(t, 3, *got.TopN)
	require.NotNil(t, got.ReturnDocuments)
	assert.True(t, *got.ReturnDocuments)
}

// TestDecodeRerankResponse checks result mapping including the optional
// document echo.
func TestDecodeRerankResponse(t *testing.T) {
	body := []byte(`{
		"results": [{"index": 0, "relevance_score": 0.95, "document": "relevant doc"}],
		"model": "rerank-1"
	}`)
	got, err := decodeRerankResponse(body)
	require.NoError(t, err)
	assert.Equal(t, "rerank-1", got.Model)
	require.Len(t, got.Results, 1)
	first := got.Results[0]
	assert.Equal(t, 0, first.Index)
	assert.InDelta(t, 0.95, first.RelevanceScore, 0.001)
	require.NotNil(t, first.Document)
	assert.Equal(t, "relevant doc", *first.Document)
}
// TestDecodeModelInfoResponse checks field mapping of a model info object.
func TestDecodeModelInfoResponse(t *testing.T) {
	got, err := decodeModelInfoResponse([]byte(`{"id":"gpt-4","object":"model","created":1700000000,"owned_by":"openai"}`))
	require.NoError(t, err)
	assert.Equal(t, "gpt-4", got.ID)
	assert.Equal(t, int64(1700000000), got.Created)
	assert.Equal(t, "openai", got.OwnedBy)
}
// TestEncodeEmbeddingRequest checks that encoding rewrites the model to the
// target provider's model and carries input/encoding_format through.
func TestEncodeEmbeddingRequest(t *testing.T) {
	req := &canonical.CanonicalEmbeddingRequest{
		Model:          "text-embedding-3-small",
		Input:          "hello",
		EncodingFormat: "float",
	}
	target := conversion.NewTargetProvider("", "key", "my-embedding-model")
	body, err := encodeEmbeddingRequest(req, target)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "my-embedding-model", got["model"])
	assert.Equal(t, "hello", got["input"])
	assert.Equal(t, "float", got["encoding_format"])
}

// TestEncodeEmbeddingRequest_WithDimensions checks the optional dimensions
// field survives encoding (JSON numbers round-trip as float64).
func TestEncodeEmbeddingRequest_WithDimensions(t *testing.T) {
	dims := 256
	req := &canonical.CanonicalEmbeddingRequest{
		Model:      "text-embedding",
		Input:      "test",
		Dimensions: &dims,
	}
	body, err := encodeEmbeddingRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, float64(256), got["dimensions"])
}
// TestEncodeEmbeddingResponse checks the OpenAI "list" envelope is produced
// with the model name preserved.
func TestEncodeEmbeddingResponse(t *testing.T) {
	resp := &canonical.CanonicalEmbeddingResponse{
		Data:  []canonical.EmbeddingData{{Index: 0, Embedding: []float64{0.1, 0.2}}},
		Model: "text-embedding",
		Usage: canonical.EmbeddingUsage{PromptTokens: 3, TotalTokens: 3},
	}
	body, err := encodeEmbeddingResponse(resp)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "list", got["object"])
	assert.Equal(t, "text-embedding", got["model"])
}
// TestEncodeRerankRequest checks the model is rewritten to the target
// provider's model and the query passes through.
func TestEncodeRerankRequest(t *testing.T) {
	topN := 5
	req := &canonical.CanonicalRerankRequest{
		Model:     "rerank-1",
		Query:     "what is AI",
		Documents: []string{"doc1", "doc2"},
		TopN:      &topN,
	}
	body, err := encodeRerankRequest(req, conversion.NewTargetProvider("", "key", "my-rerank-model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "my-rerank-model", got["model"])
	assert.Equal(t, "what is AI", got["query"])
}

// TestEncodeRerankResponse checks results (with the optional document echo)
// round-trip into the wire format.
func TestEncodeRerankResponse(t *testing.T) {
	doc := "relevant passage"
	resp := &canonical.CanonicalRerankResponse{
		Results: []canonical.RerankResult{
			{Index: 0, RelevanceScore: 0.95, Document: &doc},
		},
		Model: "rerank-1",
	}
	body, err := encodeRerankResponse(resp)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "rerank-1", got["model"])
	results, ok := got["results"].([]any)
	require.True(t, ok)
	assert.Len(t, results, 1)
}
// TestEncodeModelInfoResponse checks the "object" discriminator is set to
// "model" and the id is preserved.
func TestEncodeModelInfoResponse(t *testing.T) {
	info := &canonical.CanonicalModelInfo{
		ID:      "gpt-4",
		Name:    "GPT-4",
		Created: 1700000000,
		OwnedBy: "openai",
	}
	body, err := encodeModelInfoResponse(info)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "gpt-4", got["id"])
	assert.Equal(t, "model", got["object"])
}
// Each decoder below must reject a malformed JSON body with an error.

func TestDecodeEmbeddingResponse_InvalidJSON(t *testing.T) {
	_, err := decodeEmbeddingResponse([]byte(`invalid`))
	assert.Error(t, err)
}

func TestDecodeRerankRequest_InvalidJSON(t *testing.T) {
	_, err := decodeRerankRequest([]byte(`invalid`))
	assert.Error(t, err)
}

func TestDecodeRerankResponse_InvalidJSON(t *testing.T) {
	_, err := decodeRerankResponse([]byte(`invalid`))
	assert.Error(t, err)
}

func TestDecodeModelInfoResponse_InvalidJSON(t *testing.T) {
	_, err := decodeModelInfoResponse([]byte(`invalid`))
	assert.Error(t, err)
}
// TestDecodeRequest_ThinkingNone: reasoning_effort "none" maps to a disabled
// thinking configuration.
func TestDecodeRequest_ThinkingNone(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"none"}`))
	require.NoError(t, err)
	require.NotNil(t, req.Thinking)
	assert.Equal(t, "disabled", req.Thinking.Type)
}

// TestDecodeRequest_ThinkingMinimal: reasoning_effort "minimal" enables
// thinking at low effort.
func TestDecodeRequest_ThinkingMinimal(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"minimal"}`))
	require.NoError(t, err)
	require.NotNil(t, req.Thinking)
	assert.Equal(t, "enabled", req.Thinking.Type)
	assert.Equal(t, "low", req.Thinking.Effort)
}
// TestDecodeRequest_OutputFormat_Text: an explicit text response_format is
// the default and yields no OutputFormat.
func TestDecodeRequest_OutputFormat_Text(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"response_format":{"type":"text"}}`))
	require.NoError(t, err)
	assert.Nil(t, req.OutputFormat)
}

// TestDecodeRequest_DeprecatedFunctionCall: legacy function_call/functions
// fields are normalized into tool_choice/tools.
func TestDecodeRequest_DeprecatedFunctionCall(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"function_call":"auto","functions":[{"name":"fn1","parameters":{}}]}`))
	require.NoError(t, err)
	assert.Equal(t, "auto", req.ToolChoice.Type)
	assert.Len(t, req.Tools, 1)
}
// TestDecodeRequest_FunctionMessage: the legacy "function" role is
// normalized to the canonical tool role.
func TestDecodeRequest_FunctionMessage(t *testing.T) {
	body := []byte(`{
		"model": "gpt-4",
		"messages": [
			{"role": "user", "content": "hi"},
			{"role": "function", "name": "get_weather", "content": "sunny"}
		]
	}`)
	req, err := decodeRequest(body)
	require.NoError(t, err)
	require.Len(t, req.Messages, 2)
	assert.Equal(t, canonical.RoleTool, req.Messages[1].Role)
}

// TestDecodeRequest_StopString: a scalar stop string becomes a one-element
// stop-sequence list.
func TestDecodeRequest_StopString(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":"END"}`))
	require.NoError(t, err)
	assert.Equal(t, []string{"END"}, req.Parameters.StopSequences)
}

// TestDecodeRequest_StopEmptyString: an empty stop string is dropped rather
// than producing an empty sequence.
func TestDecodeRequest_StopEmptyString(t *testing.T) {
	req, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":""}`))
	require.NoError(t, err)
	assert.Nil(t, req.Parameters.StopSequences)
}
// TestDecodeResponse_EmptyChoices: a response with no choices still yields a
// canonical response carrying a single empty text block.
func TestDecodeResponse_EmptyChoices(t *testing.T) {
	resp, err := decodeResponse([]byte(`{"id":"resp-1","model":"gpt-4","choices":[],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}}`))
	require.NoError(t, err)
	assert.Equal(t, "resp-1", resp.ID)
	require.Len(t, resp.Content, 1)
	assert.Equal(t, "", resp.Content[0].Text)
}

// TestDecodeResponse_FunctionCallFinishReason: the legacy function_call
// finish reason maps to the canonical tool_use stop reason.
func TestDecodeResponse_FunctionCallFinishReason(t *testing.T) {
	body := []byte(`{
		"id":"r1","model":"gpt-4",
		"choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"function_call"}],
		"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}
	}`)
	resp, err := decodeResponse(body)
	require.NoError(t, err)
	require.NotNil(t, resp.StopReason)
	assert.Equal(t, canonical.StopReasonToolUse, *resp.StopReason)
}
// TestEncodeRequest_DisabledThinking: a disabled thinking config encodes as
// reasoning_effort "none".
func TestEncodeRequest_DisabledThinking(t *testing.T) {
	req := &canonical.CanonicalRequest{
		Model:    "gpt-4",
		Messages: []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		Thinking: &canonical.ThinkingConfig{Type: "disabled"},
	}
	body, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "none", got["reasoning_effort"])
}

// TestEncodeRequest_OutputFormat_JSONObject: a json_object output format maps
// to response_format {"type":"json_object"}.
func TestEncodeRequest_OutputFormat_JSONObject(t *testing.T) {
	req := &canonical.CanonicalRequest{
		Model:        "gpt-4",
		Messages:     []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		OutputFormat: &canonical.OutputFormat{Type: "json_object"},
	}
	body, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	rf, ok := got["response_format"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "json_object", rf["type"])
}
// TestEncodeRequest_PublicFields: UserID and ParallelToolUse map to the
// OpenAI "user" and "parallel_tool_calls" wire fields.
func TestEncodeRequest_PublicFields(t *testing.T) {
	parallel := true
	req := &canonical.CanonicalRequest{
		Model:           "gpt-4",
		Messages:        []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		UserID:          "user-123",
		ParallelToolUse: &parallel,
	}
	body, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "user-123", got["user"])
	assert.Equal(t, true, got["parallel_tool_calls"])
}
// TestEncodeResponse_UsageWithCacheAndReasoning checks that cache-read and
// reasoning token counts are emitted inside the nested OpenAI usage details.
func TestEncodeResponse_UsageWithCacheAndReasoning(t *testing.T) {
	cachedTokens := 80
	reasoningTokens := 20
	stop := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:         "r1",
		Model:      "gpt-4",
		Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
		StopReason: &stop,
		Usage: canonical.CanonicalUsage{
			InputTokens:     100,
			OutputTokens:    50,
			CacheReadTokens: &cachedTokens,
			ReasoningTokens: &reasoningTokens,
		},
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	usage, ok := got["usage"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(100), usage["prompt_tokens"])

	promptDetails, ok := usage["prompt_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(80), promptDetails["cached_tokens"])

	completionDetails, ok := usage["completion_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(20), completionDetails["reasoning_tokens"])
}
// TestEncodeResponse_StopReasons table-drives the canonical→OpenAI
// finish_reason mapping across all stop reasons.
func TestEncodeResponse_StopReasons(t *testing.T) {
	cases := []struct {
		name   string
		reason canonical.StopReason
		want   string
	}{
		{"end_turn→stop", canonical.StopReasonEndTurn, "stop"},
		{"max_tokens→length", canonical.StopReasonMaxTokens, "length"},
		{"tool_use→tool_calls", canonical.StopReasonToolUse, "tool_calls"},
		{"content_filter→content_filter", canonical.StopReasonContentFilter, "content_filter"},
		{"stop_sequence→stop", canonical.StopReasonStopSequence, "stop"},
		{"refusal→stop", canonical.StopReasonRefusal, "stop"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			reason := tc.reason
			resp := &canonical.CanonicalResponse{
				ID:         "r1",
				Model:      "gpt-4",
				Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
				StopReason: &reason,
				Usage:      canonical.CanonicalUsage{},
			}
			body, err := encodeResponse(resp)
			require.NoError(t, err)

			var got map[string]any
			require.NoError(t, json.Unmarshal(body, &got))
			choices, ok := got["choices"].([]any)
			require.True(t, ok)
			require.NotEmpty(t, choices)
			choice, ok := choices[0].(map[string]any)
			require.True(t, ok)
			assert.Equal(t, tc.want, choice["finish_reason"])
		})
	}
}
// TestMapErrorCode_AllCodes pins the conversion-error → OpenAI error-type
// mapping for every defined code: request-side conversion failures become
// invalid_request_error, internal/stream failures become server_error.
func TestMapErrorCode_AllCodes(t *testing.T) {
	const (
		invalidRequest = "invalid_request_error"
		serverError    = "server_error"
	)
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeInvalidInput))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeMissingRequiredField))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeIncompatibleFeature))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeFieldMappingFailure))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeToolCallParseError))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeJSONParseError))
	assert.Equal(t, invalidRequest, mapErrorCode(conversion.ErrorCodeProtocolConstraint))
	assert.Equal(t, serverError, mapErrorCode(conversion.ErrorCodeStreamStateError))
	assert.Equal(t, serverError, mapErrorCode(conversion.ErrorCodeUTF8DecodeError))
	assert.Equal(t, serverError, mapErrorCode(conversion.ErrorCodeEncodingFailure))
	assert.Equal(t, serverError, mapErrorCode(conversion.ErrorCodeInterfaceNotSupported))
}