Introduce a Canonical Model and ProtocolAdapter architecture to support seamless conversion between the OpenAI and Anthropic protocols: a unified ProxyHandler replaces the separate OpenAI/Anthropic handlers, ProviderClient is simplified into a protocol-agnostic HTTP sender, and Provider gains a new protocol field.
131 lines · 3.4 KiB · Go
package conversion
|
|
|
|
import (
|
|
"testing"
|
|
|
|
"nex/backend/internal/conversion/canonical"
|
|
|
|
"github.com/stretchr/testify/assert"
|
|
)
|
|
|
|
func TestPassthroughStreamConverter_ProcessChunk(t *testing.T) {
|
|
converter := NewPassthroughStreamConverter()
|
|
data := []byte("hello world")
|
|
result := converter.ProcessChunk(data)
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, data, result[0])
|
|
}
|
|
|
|
func TestPassthroughStreamConverter_Flush(t *testing.T) {
|
|
converter := NewPassthroughStreamConverter()
|
|
result := converter.Flush()
|
|
assert.Nil(t, result)
|
|
}
|
|
|
|
// mockStreamDecoder is a test double for the stream decoder: ProcessChunk
// pops the next pre-canned event batch from chunks, and Flush returns the
// fixed flush events.
type mockStreamDecoder struct {
	chunks [][]canonical.CanonicalStreamEvent // queued event batches, one per ProcessChunk call
	flush  []canonical.CanonicalStreamEvent   // events emitted by Flush
}
|
|
|
|
// ProcessChunk 弹出下一个分片的事件
|
|
func (d *mockStreamDecoder) ProcessChunk(rawChunk []byte) []canonical.CanonicalStreamEvent {
|
|
if len(d.chunks) == 0 {
|
|
return nil
|
|
}
|
|
events := d.chunks[0]
|
|
d.chunks = d.chunks[1:]
|
|
return events
|
|
}
|
|
|
|
// Flush 返回刷新事件
|
|
func (d *mockStreamDecoder) Flush() []canonical.CanonicalStreamEvent {
|
|
return d.flush
|
|
}
|
|
|
|
// mockStreamEncoder is a test double for the stream encoder: EncodeEvent
// returns the fixed events payload for every call, and Flush returns the
// fixed flush payload.
type mockStreamEncoder struct {
	events [][]byte // payload returned by every EncodeEvent call
	flush  [][]byte // payload returned by Flush
}
|
|
|
|
// EncodeEvent 返回编码后的事件
|
|
func (e *mockStreamEncoder) EncodeEvent(event canonical.CanonicalStreamEvent) [][]byte {
|
|
if len(e.events) == 0 {
|
|
return nil
|
|
}
|
|
return e.events
|
|
}
|
|
|
|
// Flush 返回编码器刷新数据
|
|
func (e *mockStreamEncoder) Flush() [][]byte {
|
|
return e.flush
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_ProcessChunk(t *testing.T) {
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
chunks: [][]canonical.CanonicalStreamEvent{{event}},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: test\n\n")},
|
|
}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, []byte("data: test\n\n"), result[0])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_WithMiddleware(t *testing.T) {
|
|
var records []string
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
chunks: [][]canonical.CanonicalStreamEvent{{event}},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: ok\n\n")},
|
|
}
|
|
|
|
chain := NewMiddlewareChain()
|
|
chain.Use(&recordingMiddleware{name: "mw1", records: &records})
|
|
ctx := NewConversionContext(InterfaceTypeChat)
|
|
|
|
converter := NewCanonicalStreamConverterWithMiddleware(decoder, encoder, chain, *ctx, "openai", "anthropic")
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, []string{"stream:mw1"}, records)
|
|
assert.Equal(t, []byte("data: ok\n\n"), result[0])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_Flush(t *testing.T) {
|
|
decoder := &mockStreamDecoder{
|
|
flush: []canonical.CanonicalStreamEvent{
|
|
canonical.NewMessageStopEvent(),
|
|
},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: stop\n\n")},
|
|
flush: [][]byte{[]byte("data: flush\n\n")},
|
|
}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.Flush()
|
|
|
|
assert.Len(t, result, 2)
|
|
assert.Equal(t, []byte("data: stop\n\n"), result[0])
|
|
assert.Equal(t, []byte("data: flush\n\n"), result[1])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_EmptyDecoder(t *testing.T) {
|
|
decoder := &mockStreamDecoder{}
|
|
encoder := &mockStreamEncoder{}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Nil(t, result)
|
|
}
|