实现统一模型 ID 格式 (provider_id/model_name),支持跨协议模型标识和 Smart Passthrough。 核心变更: - 新增 pkg/modelid 包:解析、格式化、校验统一模型 ID - 数据库迁移:models 表使用 UUID 主键 + UNIQUE(provider_id, model_name) 约束 - Repository 层:FindByProviderAndModelName、ListEnabled 方法 - Service 层:联合唯一校验、provider ID 字符集校验 - Conversion 层:ExtractModelName、RewriteRequestModelName/RewriteResponseModelName 方法 - Handler 层:统一模型 ID 路由、Smart Passthrough、Models API 本地聚合 - 新增 error-responses、unified-model-id 规范 测试覆盖: - 单元测试:modelid、conversion、handler、service、repository - 集成测试:统一模型 ID 路由、Smart Passthrough 保真性、跨协议转换 - 迁移测试:UUID 主键、UNIQUE 约束、级联删除 OpenSpec: - 归档 unified-model-id 变更到 archive/2026-04-21-unified-model-id - 同步 11 个 delta specs 到 main specs - 新增 error-responses、unified-model-id 规范文件
200 lines
5.9 KiB
Go
200 lines
5.9 KiB
Go
package conversion
|
|
|
|
import (
|
|
"fmt"
|
|
"testing"
|
|
|
|
"nex/backend/internal/conversion/canonical"
|
|
|
|
"github.com/stretchr/testify/assert"
|
|
)
|
|
|
|
func TestPassthroughStreamConverter_ProcessChunk(t *testing.T) {
|
|
converter := NewPassthroughStreamConverter()
|
|
data := []byte("hello world")
|
|
result := converter.ProcessChunk(data)
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, data, result[0])
|
|
}
|
|
|
|
func TestPassthroughStreamConverter_Flush(t *testing.T) {
|
|
converter := NewPassthroughStreamConverter()
|
|
result := converter.Flush()
|
|
assert.Nil(t, result)
|
|
}
|
|
|
|
// mockStreamDecoder 模拟流式解码器
|
|
type mockStreamDecoder struct {
|
|
chunks [][]canonical.CanonicalStreamEvent
|
|
flush []canonical.CanonicalStreamEvent
|
|
}
|
|
|
|
// ProcessChunk 弹出下一个分片的事件
|
|
func (d *mockStreamDecoder) ProcessChunk(rawChunk []byte) []canonical.CanonicalStreamEvent {
|
|
if len(d.chunks) == 0 {
|
|
return nil
|
|
}
|
|
events := d.chunks[0]
|
|
d.chunks = d.chunks[1:]
|
|
return events
|
|
}
|
|
|
|
// Flush 返回刷新事件
|
|
func (d *mockStreamDecoder) Flush() []canonical.CanonicalStreamEvent {
|
|
return d.flush
|
|
}
|
|
|
|
// mockStreamEncoder is a scripted stream encoder for tests: EncodeEvent hands
// back the pre-queued encoded frames and Flush returns a fixed trailing set.
type mockStreamEncoder struct {
	events [][]byte // frames returned by EncodeEvent
	flush  [][]byte // frames returned by Flush
}
|
|
|
|
// EncodeEvent 返回编码后的事件
|
|
func (e *mockStreamEncoder) EncodeEvent(event canonical.CanonicalStreamEvent) [][]byte {
|
|
if len(e.events) == 0 {
|
|
return nil
|
|
}
|
|
return e.events
|
|
}
|
|
|
|
// Flush 返回编码器刷新数据
|
|
func (e *mockStreamEncoder) Flush() [][]byte {
|
|
return e.flush
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_ProcessChunk(t *testing.T) {
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
chunks: [][]canonical.CanonicalStreamEvent{{event}},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: test\n\n")},
|
|
}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, []byte("data: test\n\n"), result[0])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_WithMiddleware(t *testing.T) {
|
|
var records []string
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
chunks: [][]canonical.CanonicalStreamEvent{{event}},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: ok\n\n")},
|
|
}
|
|
|
|
chain := NewMiddlewareChain()
|
|
chain.Use(&recordingMiddleware{name: "mw1", records: &records})
|
|
ctx := NewConversionContext(InterfaceTypeChat)
|
|
|
|
converter := NewCanonicalStreamConverterWithMiddleware(decoder, encoder, chain, *ctx, "openai", "anthropic", "")
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, []string{"stream:mw1"}, records)
|
|
assert.Equal(t, []byte("data: ok\n\n"), result[0])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_Flush(t *testing.T) {
|
|
decoder := &mockStreamDecoder{
|
|
flush: []canonical.CanonicalStreamEvent{
|
|
canonical.NewMessageStopEvent(),
|
|
},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: stop\n\n")},
|
|
flush: [][]byte{[]byte("data: flush\n\n")},
|
|
}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.Flush()
|
|
|
|
assert.Len(t, result, 2)
|
|
assert.Equal(t, []byte("data: stop\n\n"), result[0])
|
|
assert.Equal(t, []byte("data: flush\n\n"), result[1])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_EmptyDecoder(t *testing.T) {
|
|
decoder := &mockStreamDecoder{}
|
|
encoder := &mockStreamEncoder{}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Nil(t, result)
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_MiddlewareError_Continue(t *testing.T) {
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
chunks: [][]canonical.CanonicalStreamEvent{{event}},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: ok\n\n")},
|
|
}
|
|
|
|
chain := NewMiddlewareChain()
|
|
chain.Use(&errorMiddleware{})
|
|
ctx := NewConversionContext(InterfaceTypeChat)
|
|
|
|
converter := NewCanonicalStreamConverterWithMiddleware(decoder, encoder, chain, *ctx, "openai", "anthropic", "")
|
|
result := converter.ProcessChunk([]byte("raw"))
|
|
|
|
assert.Nil(t, result, "middleware error should cause the event to be skipped (continue)")
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_Flush_MiddlewareError_Continue(t *testing.T) {
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
flush: []canonical.CanonicalStreamEvent{event},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: ok\n\n")},
|
|
flush: [][]byte{[]byte("data: encoder_flush\n\n")},
|
|
}
|
|
|
|
chain := NewMiddlewareChain()
|
|
chain.Use(&errorMiddleware{})
|
|
ctx := NewConversionContext(InterfaceTypeChat)
|
|
|
|
converter := NewCanonicalStreamConverterWithMiddleware(decoder, encoder, chain, *ctx, "openai", "anthropic", "")
|
|
result := converter.Flush()
|
|
|
|
assert.Len(t, result, 1)
|
|
assert.Equal(t, []byte("data: encoder_flush\n\n"), result[0])
|
|
}
|
|
|
|
func TestCanonicalStreamConverter_Flush_DecoderAndEncoderBothProduce(t *testing.T) {
|
|
event := canonical.NewMessageStartEvent("id-1", "gpt-4")
|
|
decoder := &mockStreamDecoder{
|
|
flush: []canonical.CanonicalStreamEvent{event},
|
|
}
|
|
encoder := &mockStreamEncoder{
|
|
events: [][]byte{[]byte("data: decoder_flush\n\n")},
|
|
flush: [][]byte{[]byte("data: encoder_flush\n\n")},
|
|
}
|
|
|
|
converter := NewCanonicalStreamConverter(decoder, encoder)
|
|
result := converter.Flush()
|
|
|
|
assert.Len(t, result, 2)
|
|
assert.Equal(t, []byte("data: decoder_flush\n\n"), result[0])
|
|
assert.Equal(t, []byte("data: encoder_flush\n\n"), result[1])
|
|
}
|
|
|
|
type errorMiddleware struct{}
|
|
|
|
func (m *errorMiddleware) Intercept(req *canonical.CanonicalRequest, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalRequest, error) {
|
|
return nil, fmt.Errorf("middleware error")
|
|
}
|
|
|
|
func (m *errorMiddleware) InterceptStreamEvent(event *canonical.CanonicalStreamEvent, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalStreamEvent, error) {
|
|
return nil, fmt.Errorf("stream middleware error")
|
|
}
|