1
0

refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包

- 新增 ConversionEngine 核心引擎,支持 OpenAI 和 Anthropic 协议转换
- 添加 stream decoder/encoder 实现
- 更新 provider client 支持新引擎
- 补充单元测试和集成测试
- 更新 specs 文档
This commit is contained in:
2026-04-20 13:01:05 +08:00
parent 1dac347d3b
commit bc1ee612d9
39 changed files with 11177 additions and 995 deletions

View File

@@ -272,3 +272,218 @@ func TestStreamDecoder_RedactedDeltaSuppressed(t *testing.T) {
events := d.ProcessChunk(raw)
assert.Empty(t, events)
}
// TestStreamDecoder_ServerToolUse_Suppressed verifies that a
// content_block_start event carrying a server_tool_use block emits no
// canonical events and that the block index is marked as redacted.
func TestStreamDecoder_ServerToolUse_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	body := map[string]any{
		"type":  "content_block_start",
		"index": 2,
		"content_block": map[string]any{
			"type": "server_tool_use",
			"id":   "server_tool_1",
			"name": "web_search",
		},
	}
	got := dec.ProcessChunk(makeAnthropicEvent("content_block_start", body))
	assert.Empty(t, got)
	assert.True(t, dec.redactedBlocks[2])
}
// TestStreamDecoder_WebSearchToolResult_Suppressed checks that a
// web_search_tool_result content block is suppressed and its index is
// recorded in the decoder's redacted set.
func TestStreamDecoder_WebSearchToolResult_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 3,
		"content_block": map[string]any{
			"type":        "web_search_tool_result",
			"tool_use_id": "search_1",
		},
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
	assert.True(t, dec.redactedBlocks[3])
}
// TestStreamDecoder_CodeExecutionToolResult_Suppressed checks that a
// code_execution_tool_result content block produces no events and marks
// its index as redacted.
func TestStreamDecoder_CodeExecutionToolResult_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 4,
		"content_block": map[string]any{
			"type": "code_execution_tool_result",
		},
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
	assert.True(t, dec.redactedBlocks[4])
}
// TestStreamDecoder_CitationsDelta_Discarded verifies that a
// citations_delta inside content_block_delta is dropped without emitting
// any canonical event.
func TestStreamDecoder_CitationsDelta_Discarded(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_delta", map[string]any{
		"type":  "content_block_delta",
		"index": 0,
		"delta": map[string]any{
			"type":     "citations_delta",
			"citation": map[string]any{"title": "ref1"},
		},
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
}
// TestStreamDecoder_SignatureDelta_Discarded verifies that a
// signature_delta inside content_block_delta is dropped without emitting
// any canonical event.
func TestStreamDecoder_SignatureDelta_Discarded(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_delta", map[string]any{
		"type":  "content_block_delta",
		"index": 0,
		"delta": map[string]any{
			"type":      "signature_delta",
			"signature": "sig_123",
		},
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
}
// TestStreamDecoder_UnknownEventType checks that an unrecognized event
// type is silently ignored.
func TestStreamDecoder_UnknownEventType(t *testing.T) {
	dec := NewStreamDecoder()
	got := dec.ProcessChunk(makeAnthropicEvent("unknown_event", map[string]any{"type": "unknown_event"}))
	assert.Empty(t, got)
}
// TestStreamDecoder_InvalidJSON checks that a frame whose data payload is
// not valid JSON is dropped without producing events.
func TestStreamDecoder_InvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	got := dec.ProcessChunk([]byte("event: message_start\ndata: {invalid}\n\n"))
	assert.Empty(t, got)
}
// TestStreamDecoder_MultipleEventsInSingleChunk verifies that several SSE
// frames concatenated into one chunk are decoded into the corresponding
// sequence of canonical events, in order.
func TestStreamDecoder_MultipleEventsInSingleChunk(t *testing.T) {
	dec := NewStreamDecoder()

	frames := []struct {
		event   string
		payload map[string]any
	}{
		{"message_start", map[string]any{
			"type": "message_start",
			"message": map[string]any{
				"id":    "msg_multi",
				"model": "claude-3",
			},
		}},
		{"content_block_delta", map[string]any{
			"type":  "content_block_delta",
			"index": 0,
			"delta": map[string]any{
				"type": "text_delta",
				"text": "Hello",
			},
		}},
		{"message_stop", map[string]any{"type": "message_stop"}},
	}

	// Concatenate all frames into a single chunk before decoding.
	var chunk []byte
	for _, f := range frames {
		chunk = append(chunk, makeAnthropicEvent(f.event, f.payload)...)
	}

	got := dec.ProcessChunk(chunk)
	require.Len(t, got, 3)
	assert.Equal(t, canonical.EventMessageStart, got[0].Type)
	assert.Equal(t, canonical.EventContentBlockDelta, got[1].Type)
	assert.Equal(t, canonical.EventMessageStop, got[2].Type)
}
// TestStreamDecoder_ErrorInvalidJSON checks that an error frame with a
// malformed JSON payload still yields exactly one canonical error event
// whose message notes the parse failure.
func TestStreamDecoder_ErrorInvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	got := dec.ProcessChunk([]byte("event: error\ndata: {invalid}\n\n"))
	require.Len(t, got, 1)
	assert.Equal(t, canonical.EventError, got[0].Type)
	assert.Contains(t, got[0].Error.Message, "解析错误事件失败")
}
// TestStreamDecoder_MessageStartWithUsage verifies that usage data on a
// message_start event is carried through to the canonical event's message.
func TestStreamDecoder_MessageStartWithUsage(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("message_start", map[string]any{
		"type": "message_start",
		"message": map[string]any{
			"id":    "msg_usage",
			"model": "claude-3",
			"usage": map[string]any{"input_tokens": 25, "output_tokens": 0},
		},
	})
	got := dec.ProcessChunk(chunk)
	require.Len(t, got, 1)
	assert.Equal(t, canonical.EventMessageStart, got[0].Type)
	require.NotNil(t, got[0].Message.Usage)
	assert.Equal(t, 25, got[0].Message.Usage.InputTokens)
}
// TestStreamDecoder_ThinkingBlockStart verifies that a thinking content
// block start is decoded into a content_block_start canonical event with
// the thinking block type preserved.
func TestStreamDecoder_ThinkingBlockStart(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 0,
		"content_block": map[string]any{
			"type":     "thinking",
			"thinking": "",
		},
	})
	got := dec.ProcessChunk(chunk)
	require.Len(t, got, 1)
	assert.Equal(t, canonical.EventContentBlockStart, got[0].Type)
	require.NotNil(t, got[0].ContentBlock)
	assert.Equal(t, "thinking", got[0].ContentBlock.Type)
}
// TestStreamDecoder_MessageDelta_UsageNotAccumulated verifies that
// output_tokens reported on successive message_delta events replaces the
// previous value rather than being summed, both on the emitted event and
// in the decoder's accumulated usage.
func TestStreamDecoder_MessageDelta_UsageNotAccumulated(t *testing.T) {
	dec := NewStreamDecoder()

	// Open the message with an initial usage snapshot.
	dec.ProcessChunk(makeAnthropicEvent("message_start", map[string]any{
		"type": "message_start",
		"message": map[string]any{
			"id":    "msg_usage_test",
			"model": "claude-3",
			"usage": map[string]any{"input_tokens": 10, "output_tokens": 0},
		},
	}))

	// First delta reports 25 output tokens.
	first := dec.ProcessChunk(makeAnthropicEvent("message_delta", map[string]any{
		"type":  "message_delta",
		"delta": map[string]any{"stop_reason": "end_turn"},
		"usage": map[string]any{"output_tokens": 25},
	}))
	require.Len(t, first, 1)
	assert.Equal(t, 25, first[0].Usage.OutputTokens)

	// Second delta reports 30; the value must replace 25, not add to it.
	second := dec.ProcessChunk(makeAnthropicEvent("message_delta", map[string]any{
		"type":  "message_delta",
		"delta": map[string]any{"stop_reason": "end_turn"},
		"usage": map[string]any{"output_tokens": 30},
	}))
	require.Len(t, second, 1)
	assert.Equal(t, 30, second[0].Usage.OutputTokens, "output_tokens should be replaced, not accumulated")
	assert.Equal(t, 30, dec.accumulatedUsage.OutputTokens, "accumulated usage should match last value")
}