-
Notifications
You must be signed in to change notification settings - Fork 71
Expand file tree
/
Copy pathclient_v2.go
More file actions
120 lines (100 loc) · 3.36 KB
/
client_v2.go
File metadata and controls
120 lines (100 loc) · 3.36 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
package client
import (
"paperdebugger/internal/libs/cfg"
"paperdebugger/internal/libs/db"
"paperdebugger/internal/libs/logger"
"paperdebugger/internal/models"
"paperdebugger/internal/services"
"paperdebugger/internal/services/toolkit/handler"
"github.com/openai/openai-go/v3"
"github.com/openai/openai-go/v3/option"
"go.mongodb.org/mongo-driver/v2/mongo"
)
// AIClientV2 is the second-generation AI client. It bundles the tool-call
// handler, persistence handles, and the services/config/logging dependencies
// needed to talk to an OpenAI-compatible inference endpoint.
type AIClientV2 struct {
	toolCallHandler        *handler.ToolCallHandlerV2 // dispatches model tool/function calls
	db                     *mongo.Database            // "paperdebugger" database handle
	functionCallCollection *mongo.Collection          // collection backing models.FunctionCall records
	reverseCommentService  *services.ReverseCommentService
	projectService         *services.ProjectService
	cfg                    *cfg.Cfg       // server configuration (endpoints, API keys)
	logger                 *logger.Logger
}
// GetOpenAIClient returns an OpenAI client for the given LLM provider config.
//
// Endpoint selection, when the config does not specify its own endpoint:
//   - MiniMax models are routed to the MiniMax API when the server has a
//     MiniMax API key configured;
//   - a user-supplied API key is pointed at the OpenAI-compatible endpoint;
//   - otherwise the default inference endpoint ("/openrouter" suffix required
//     by the Cloudflare gateway) is used with the server's inference API key.
func (a *AIClientV2) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *openai.Client {
	endpoint := llmConfig.Endpoint
	apiKey := llmConfig.APIKey
	if endpoint == "" {
		switch {
		case models.IsMiniMaxModel(llmConfig.ModelName) && a.cfg.MiniMaxAPIKey != "":
			// Route MiniMax models to the MiniMax API when the server holds a key.
			endpoint = a.cfg.MiniMaxBaseURL
			apiKey = a.cfg.MiniMaxAPIKey
		case apiKey != "":
			// User provided their own API key: use the OpenAI-compatible endpoint.
			endpoint = a.cfg.OpenAIBaseURL // standard openai base url
		default:
			// suffix needed for cloudflare gateway
			endpoint = a.cfg.InferenceBaseURL + "/openrouter"
		}
	}
	// Fall back to the server's inference key when no key was resolved above.
	if apiKey == "" {
		apiKey = a.cfg.InferenceAPIKey
	}
	client := openai.NewClient(
		option.WithAPIKey(apiKey),
		option.WithBaseURL(endpoint),
	)
	return &client
}
// NewAIClientV2 constructs an AIClientV2 wired to the "paperdebugger"
// database and the given services. As a side effect it runs a startup
// health check (CheckOpenAIWorksV2) against the endpoint that would be
// used by default: the OpenAI-compatible endpoint when a server-level
// OpenAI API key is configured, otherwise the inference gateway.
func NewAIClientV2(
	db *db.DB,
	reverseCommentService *services.ReverseCommentService,
	projectService *services.ProjectService,
	cfg *cfg.Cfg,
	logger *logger.Logger,
) *AIClientV2 {
	database := db.Database("paperdebugger")

	// Build a provider config from the server-level key solely to decide
	// which endpoint the startup health check should target.
	llmProvider := &models.LLMProviderConfig{
		APIKey: cfg.OpenAIAPIKey,
	}

	var baseURL, apiKey, modelSlug string
	// NOTE: llmProvider is assigned from a composite literal above and can
	// never be nil, so no nil check is needed here.
	if llmProvider.IsCustom() {
		// A server-level OpenAI key is set: use the OpenAI-compatible endpoint.
		baseURL = cfg.OpenAIBaseURL
		apiKey = cfg.OpenAIAPIKey
		modelSlug = "gpt-5-nano"
	} else {
		// Default inference endpoint; suffix needed for cloudflare gateway.
		baseURL = cfg.InferenceBaseURL + "/openrouter"
		apiKey = cfg.InferenceAPIKey
		modelSlug = "openai/gpt-5-nano"
	}

	CheckOpenAIWorksV2(
		openai.NewClient(
			option.WithBaseURL(baseURL),
			option.WithAPIKey(apiKey),
		),
		baseURL,
		modelSlug,
		logger,
	)

	toolRegistry := initializeToolkitV2(db, projectService, cfg, logger)
	toolCallHandler := handler.NewToolCallHandlerV2(toolRegistry)

	return &AIClientV2{
		toolCallHandler:        toolCallHandler,
		db:                     database,
		functionCallCollection: database.Collection((models.FunctionCall{}).CollectionName()),
		reverseCommentService:  reverseCommentService,
		projectService:         projectService,
		cfg:                    cfg,
		logger:                 logger,
	}
}