Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions backend/kitex_gen/coze/loop/llm/domain/manage/manage.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions backend/modules/llm/domain/entity/manage.go
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,7 @@ type ProtocolConfig struct {
ProtocolConfigQwen *ProtocolConfigQwen `json:"protocol_config_qwen" yaml:"protocol_config_qwen" mapstructure:"protocol_config_qwen"`
ProtocolConfigQianfan *ProtocolConfigQianfan `json:"protocol_config_qianfan" yaml:"protocol_config_qianfan" mapstructure:"protocol_config_qianfan"`
ProtocolConfigArkBot *ProtocolConfigArkBot `json:"protocol_config_ark_bot" yaml:"protocol_config_ark_bot" mapstructure:"protocol_config_ark_bot"`
ProtocolConfigMiniMax *ProtocolConfigMiniMax `json:"protocol_config_minimax" yaml:"protocol_config_minimax" mapstructure:"protocol_config_minimax"`
}

type ProtocolConfigArk struct {
Expand Down Expand Up @@ -265,6 +266,10 @@ type ProtocolConfigQianfan struct {
ResponseFormatJsonSchema *string `json:"response_format_json_schema" yaml:"response_format_json_schema" mapstructure:"response_format_json_schema"`
}

// ProtocolConfigMiniMax holds MiniMax-specific protocol settings.
type ProtocolConfigMiniMax struct {
	// ResponseFormatType selects the chat completion response format
	// (e.g. "json_object"); empty means the provider default is used.
	ResponseFormatType string `json:"response_format_type" yaml:"response_format_type" mapstructure:"response_format_type"`
}

type ProtocolConfigArkBot struct {
Region string `json:"region" yaml:"region" mapstructure:"region"`
AccessKey string `json:"access_key" yaml:"access_key" mapstructure:"access_key"`
Expand Down Expand Up @@ -406,6 +411,7 @@ const (
ProtocolQwen Protocol = "qwen"
ProtocolQianfan Protocol = "qianfan"
ProtocolArkBot Protocol = "arkbot"
ProtocolMiniMax Protocol = "minimax"
)

type Family string
Expand Down
19 changes: 19 additions & 0 deletions backend/modules/llm/domain/service/llmfactory/factory_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -287,6 +287,25 @@ func TestFactoryImpl_CreateLLM(t *testing.T) {
wantNotNil: true,
wantErr: nil,
},
{
name: "eino_minimax",
args: args{
ctx: context.Background(),
model: &entity.Model{
Frame: entity.FrameDefault,
Protocol: entity.ProtocolMiniMax,
ProtocolConfig: &entity.ProtocolConfig{
APIKey: "your-api-key",
Model: "MiniMax-M2.7",
TimeoutMs: nil,
ProtocolConfigMiniMax: &entity.ProtocolConfigMiniMax{},
},
ParamConfig: paramCfg,
},
},
wantNotNil: true,
wantErr: nil,
},
{
name: "failed",
args: args{
Expand Down
43 changes: 43 additions & 0 deletions backend/modules/llm/domain/service/llmimpl/eino/init.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@ func NewLLM(ctx context.Context, model *entity.Model, opts ...entity.Option) (*L
chatModel, err = qianfanBuilder(ctx, model, opts...)
case entity.ProtocolArkBot:
chatModel, err = arkBotBuilder(ctx, model, opts...)
case entity.ProtocolMiniMax:
chatModel, err = miniMaxBuilder(ctx, model, opts...)
default:
err = errors.Errorf("eino unsupport the protocol:%s", model.Protocol)
}
Expand Down Expand Up @@ -441,3 +443,44 @@ func arkBotBuilder(ctx context.Context, model *entity.Model, opts ...entity.Opti
}
return arkbot.NewChatModel(ctx, cfg)
}

// miniMaxBuilder constructs an eino OpenAI-compatible chat model for the
// MiniMax protocol. Protocol-level settings come from model.ProtocolConfig;
// per-call runtime options (temperature, top_p, response format, ...) are
// applied on top, with the runtime response format taking precedence over
// the protocol-level one.
func miniMaxBuilder(ctx context.Context, model *entity.Model, opts ...entity.Option) (einoModel.ToolCallingChatModel, error) {
	if err := checkModelBeforeBuild(model); err != nil {
		return nil, err
	}

	var (
		protoCfg = model.ProtocolConfig
		runtime  = entity.ApplyOptions(nil, opts...)
	)

	// Fall back to the public MiniMax endpoint when no base URL is configured.
	endpoint := protoCfg.BaseURL
	if endpoint == "" {
		endpoint = "https://api.minimax.io/v1"
	}

	cfg := &openai.ChatModelConfig{
		BaseURL:          endpoint,
		APIKey:           protoCfg.APIKey,
		Model:            protoCfg.Model,
		MaxTokens:        runtime.MaxTokens,
		Temperature:      runtime.Temperature,
		TopP:             runtime.TopP,
		Stop:             runtime.Stop,
		FrequencyPenalty: runtime.FrequencyPenalty,
		PresencePenalty:  runtime.PresencePenalty,
	}
	if protoCfg.TimeoutMs != nil {
		cfg.Timeout = time.Duration(*protoCfg.TimeoutMs) * time.Millisecond
	}

	// Protocol-level response format first; a runtime response format,
	// when present, replaces it entirely.
	if mm := protoCfg.ProtocolConfigMiniMax; mm != nil && mm.ResponseFormatType != "" {
		cfg.ResponseFormat = &acl_openai.ChatCompletionResponseFormat{
			Type: acl_openai.ChatCompletionResponseFormatType(mm.ResponseFormatType),
		}
	}
	if rf := runtime.ResponseFormat; rf != nil {
		cfg.ResponseFormat = &acl_openai.ChatCompletionResponseFormat{
			Type: acl_openai.ChatCompletionResponseFormatType(rf.Type),
		}
	}

	return openai.NewChatModel(ctx, cfg)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
// Copyright (c) 2025 coze-dev Authors
// SPDX-License-Identifier: Apache-2.0

package eino

import (
"context"
"os"
"testing"

"github.com/stretchr/testify/require"

"github.com/coze-dev/coze-loop/backend/modules/llm/domain/entity"
)

// miniMaxTestModel builds the minimal MiniMax model entity used by the
// integration subtests, avoiding one hand-rolled literal per subtest.
func miniMaxTestModel(apiKey, modelName string) *entity.Model {
	return &entity.Model{
		Protocol: entity.ProtocolMiniMax,
		ProtocolConfig: &entity.ProtocolConfig{
			APIKey: apiKey,
			Model:  modelName,
		},
	}
}

// TestMiniMaxIntegration exercises the MiniMax protocol end-to-end against
// the live API. It is skipped unless MINIMAX_API_KEY is set, so it never
// runs in hermetic CI environments.
func TestMiniMaxIntegration(t *testing.T) {
	apiKey := os.Getenv("MINIMAX_API_KEY")
	if apiKey == "" {
		t.Skip("MINIMAX_API_KEY not set, skipping integration tests")
	}

	t.Run("minimax_m27_generate", func(t *testing.T) {
		llm, err := NewLLM(context.Background(), miniMaxTestModel(apiKey, "MiniMax-M2.7"),
			entity.WithTemperature(0.7),
			entity.WithMaxTokens(100),
		)
		require.NoError(t, err)
		require.NotNil(t, llm)

		resp, err := llm.Generate(context.Background(), []*entity.Message{
			{Role: entity.RoleUser, Content: "Say hello in one word."},
		})
		require.NoError(t, err)
		require.NotNil(t, resp)
		require.NotEmpty(t, resp.Content)
		require.Equal(t, entity.RoleAssistant, resp.Role)
	})

	t.Run("minimax_m25_highspeed_generate", func(t *testing.T) {
		llm, err := NewLLM(context.Background(), miniMaxTestModel(apiKey, "MiniMax-M2.5-highspeed"),
			entity.WithTemperature(0.7),
			entity.WithMaxTokens(100),
		)
		require.NoError(t, err)
		require.NotNil(t, llm)

		resp, err := llm.Generate(context.Background(), []*entity.Message{
			{Role: entity.RoleUser, Content: "What is 2+2? Answer with just the number."},
		})
		require.NoError(t, err)
		require.NotNil(t, resp)
		require.NotEmpty(t, resp.Content)
	})

	t.Run("minimax_stream", func(t *testing.T) {
		llm, err := NewLLM(context.Background(), miniMaxTestModel(apiKey, "MiniMax-M2.7"),
			entity.WithTemperature(0.7),
			entity.WithMaxTokens(100),
		)
		require.NoError(t, err)
		require.NotNil(t, llm)

		stream, err := llm.Stream(context.Background(), []*entity.Message{
			{Role: entity.RoleUser, Content: "Count from 1 to 5."},
		})
		require.NoError(t, err)
		require.NotNil(t, stream)
		// NOTE(review): the stream is neither drained nor closed here —
		// confirm whether the stream type requires Close() to release the
		// underlying connection.
	})
}
160 changes: 160 additions & 0 deletions backend/modules/llm/domain/service/llmimpl/eino/minimax_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,160 @@
// Copyright (c) 2025 coze-dev Authors
// SPDX-License-Identifier: Apache-2.0

package eino

import (
"context"
"testing"

"github.com/stretchr/testify/assert"

"github.com/coze-dev/coze-loop/backend/modules/llm/domain/entity"
"github.com/coze-dev/coze-loop/backend/pkg/lang/ptr"
)

// TestMiniMaxBuilder verifies that NewLLM can construct a MiniMax-backed
// chat model from a variety of protocol configurations and runtime options.
// No network calls are made; only builder-time validation is exercised.
// The ten hand-rolled subtests are folded into one table-driven test.
func TestMiniMaxBuilder(t *testing.T) {
	tests := []struct {
		name    string
		cfg     *entity.ProtocolConfig // nil triggers the validation-error path
		opts    []entity.Option
		wantErr bool
	}{
		{
			name: "basic_minimax_builder",
			cfg:  &entity.ProtocolConfig{APIKey: "test-api-key", Model: "MiniMax-M2.7"},
		},
		{
			name: "minimax_with_custom_base_url",
			cfg: &entity.ProtocolConfig{
				BaseURL: "https://custom.minimax.io/v1",
				APIKey:  "test-api-key",
				Model:   "MiniMax-M2.7",
			},
		},
		{
			name: "minimax_with_timeout",
			cfg: &entity.ProtocolConfig{
				APIKey:    "test-api-key",
				Model:     "MiniMax-M2.5",
				TimeoutMs: ptr.Of(int64(30000)),
			},
		},
		{
			name: "minimax_with_response_format",
			cfg: &entity.ProtocolConfig{
				APIKey: "test-api-key",
				Model:  "MiniMax-M2.7",
				ProtocolConfigMiniMax: &entity.ProtocolConfigMiniMax{
					ResponseFormatType: "json_object",
				},
			},
		},
		{
			name: "minimax_with_runtime_options",
			cfg:  &entity.ProtocolConfig{APIKey: "test-api-key", Model: "MiniMax-M2.7"},
			opts: []entity.Option{
				entity.WithTemperature(0.7),
				entity.WithTopP(0.9),
				entity.WithMaxTokens(1024),
				entity.WithStop([]string{"stop1"}),
			},
		},
		{
			name: "minimax_with_runtime_response_format",
			cfg:  &entity.ProtocolConfig{APIKey: "test-api-key", Model: "MiniMax-M2.7"},
			opts: []entity.Option{
				entity.WithResponseFormat(&entity.ResponseFormat{Type: "json_object"}),
			},
		},
		{
			name:    "minimax_nil_protocol_config",
			cfg:     nil,
			wantErr: true,
		},
		{
			name: "minimax_m25_highspeed",
			cfg:  &entity.ProtocolConfig{APIKey: "test-api-key", Model: "MiniMax-M2.5-highspeed"},
		},
		{
			name: "minimax_with_penalty_params",
			cfg:  &entity.ProtocolConfig{APIKey: "test-api-key", Model: "MiniMax-M2.7"},
			opts: []entity.Option{
				entity.WithFrequencyPenalty(0.5),
				entity.WithPresencePenalty(0.3),
			},
		},
		{
			// When BaseURL is empty, miniMaxBuilder should use the default MiniMax API URL.
			name: "minimax_default_base_url",
			cfg:  &entity.ProtocolConfig{BaseURL: "", APIKey: "test-api-key", Model: "MiniMax-M2.7"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			model := &entity.Model{
				Protocol:       entity.ProtocolMiniMax,
				ProtocolConfig: tt.cfg,
			}
			llm, err := NewLLM(context.Background(), model, tt.opts...)
			if tt.wantErr {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
			assert.NotNil(t, llm)
			assert.Equal(t, entity.ProtocolMiniMax, llm.protocol)
		})
	}
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ export interface ProtocolConfig {
protocol_config_qianfan?: ProtocolConfigQianfan,
protocol_config_gemini?: ProtocolConfigGemini,
protocol_config_arkbot?: ProtocolConfigArkbot,
protocol_config_minimax?: ProtocolConfigMiniMax,
}
export interface ProtocolConfigArk {
/** Default: "cn-beijing" */
Expand Down Expand Up @@ -108,6 +109,9 @@ export interface ProtocolConfigQianfan {
response_format_type?: string,
response_format_json_schema?: string,
}
/** MiniMax-specific protocol settings. */
export interface ProtocolConfigMiniMax {
  /** Chat completion response format, e.g. "json_object"; provider default when omitted. */
  response_format_type?: string,
}
export interface ProtocolConfigArkbot {
/** Default: "cn-beijing" */
region?: string,
Expand Down Expand Up @@ -158,6 +162,7 @@ export enum Protocol {
protocol_qwen = "qwen",
protocol_qianfan = "qianfan",
protocol_arkbot = "arkbot",
protocol_minimax = "minimax",
}
export enum ParamType {
param_type_float = "float",
Expand Down
Loading
Loading