Skip to content

Commit ebbfb44

Browse files
committed
fix gemini chat params
1 parent adf8216 commit ebbfb44

5 files changed

Lines changed: 31 additions & 23 deletions

File tree

internal/api/chat/create_conversation_message_stream_v2.go

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,12 @@ package chat
22

33
import (
44
"context"
5-
"fmt"
65
"paperdebugger/internal/api/mapper"
76
"paperdebugger/internal/libs/contextutil"
87
"paperdebugger/internal/libs/shared"
98
"paperdebugger/internal/models"
109
"paperdebugger/internal/services"
1110
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
12-
"strings"
1311

1412
"github.com/google/uuid"
1513
"github.com/openai/openai-go/v3"
@@ -293,23 +291,17 @@ func (s *ChatServerV2) CreateConversationMessageStream(
293291
if customModel == nil {
294292
// User did not specify API key for this model
295293
llmProvider = &models.LLMProviderConfig{
296-
APIKey: settings.OpenAIAPIKey,
294+
APIKey: "",
297295
}
298296
} else {
299-
modelSlug = modelSlug[strings.Index(modelSlug, "/")+1:]
300297
llmProvider = &models.LLMProviderConfig{
301298
APIKey: customModel.APIKey,
302299
Endpoint: customModel.BaseUrl,
303300
}
304301
}
305302

306-
fmt.Println(modelSlug)
307-
fmt.Println(llmProvider.Endpoint)
308-
fmt.Println(llmProvider.APIKey)
309-
fmt.Println("************************")
310-
openaiChatHistory, inappChatHistory, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistoryCompletion, llmProvider)
303+
openaiChatHistory, inappChatHistory, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.ID.Hex(), modelSlug, customModel != nil, conversation.OpenaiChatHistoryCompletion, llmProvider)
311304
if err != nil {
312-
fmt.Println(err)
313305
return s.sendStreamError(stream, err)
314306
}
315307

@@ -334,7 +326,7 @@ func (s *ChatServerV2) CreateConversationMessageStream(
334326
for i, bsonMsg := range conversation.InappChatHistory {
335327
protoMessages[i] = mapper.BSONToChatMessageV2(bsonMsg)
336328
}
337-
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider)
329+
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider, modelSlug, customModel != nil)
338330
if err != nil {
339331
s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex())
340332
return

internal/services/toolkit/client/client_v2.go

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package client
22

33
import (
4-
"fmt"
54
"paperdebugger/internal/libs/cfg"
65
"paperdebugger/internal/libs/db"
76
"paperdebugger/internal/libs/logger"
@@ -47,9 +46,6 @@ func (a *AIClientV2) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *opena
4746
APIKey = a.cfg.InferenceAPIKey
4847
}
4948

50-
fmt.Println(Endpoint)
51-
fmt.Println(APIKey)
52-
5349
opts := []option.RequestOption{
5450
option.WithAPIKey(APIKey),
5551
option.WithBaseURL(Endpoint),

internal/services/toolkit/client/completion_v2.go

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ package client
33
import (
44
"context"
55
"encoding/json"
6-
"fmt"
76
"paperdebugger/internal/models"
87
"paperdebugger/internal/services/toolkit/handler"
98
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
@@ -26,8 +25,8 @@ import (
2625
// 1. The full chat history sent to the language model (including any tool call results).
2726
// 2. The incremental chat history visible to the user (including tool call results and assistant responses).
2827
// 3. An error, if any occurred during the process.
29-
func (a *AIClientV2) ChatCompletionV2(ctx context.Context, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig) (OpenAIChatHistory, AppChatHistory, error) {
30-
openaiChatHistory, inappChatHistory, err := a.ChatCompletionStreamV2(ctx, nil, "", modelSlug, messages, llmProvider)
28+
func (a *AIClientV2) ChatCompletionV2(ctx context.Context, modelSlug string, isCustomModel bool, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig) (OpenAIChatHistory, AppChatHistory, error) {
29+
openaiChatHistory, inappChatHistory, err := a.ChatCompletionStreamV2(ctx, nil, "", modelSlug, isCustomModel, messages, llmProvider)
3130
if err != nil {
3231
return nil, nil, err
3332
}
@@ -55,7 +54,7 @@ func (a *AIClientV2) ChatCompletionV2(ctx context.Context, modelSlug string, mes
5554
// - If tool calls are required, it handles them and appends the results to the chat history, then continues the loop.
5655
// - If no tool calls are needed, it appends the assistant's response and exits the loop.
5756
// - Finally, it returns the updated chat histories and any error encountered.
58-
func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream chatv2.ChatService_CreateConversationMessageStreamServer, conversationId string, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig) (OpenAIChatHistory, AppChatHistory, error) {
57+
func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream chatv2.ChatService_CreateConversationMessageStreamServer, conversationId string, modelSlug string, isCustomModel bool, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig) (OpenAIChatHistory, AppChatHistory, error) {
5958
openaiChatHistory := messages
6059
inappChatHistory := AppChatHistory{}
6160

@@ -66,11 +65,14 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
6665
streamHandler.SendFinalization()
6766
}()
6867

68+
if isCustomModel {
69+
// e.g., Strip "google/" from "google/gemini-2.5-flash"
70+
modelSlug = modelSlug[strings.Index(modelSlug, "/")+1:]
71+
}
72+
6973
oaiClient := a.GetOpenAIClient(llmProvider)
7074
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry)
7175

72-
fmt.Println(params)
73-
7476
for {
7577
params.Messages = openaiChatHistory
7678
// var openaiOutput OpenAIChatHistory

internal/services/toolkit/client/get_conversation_title_v2.go

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ import (
1313
"github.com/samber/lo"
1414
)
1515

16-
func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig) (string, error) {
16+
func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig, modelSlug string, isCustomModel bool) (string, error) {
1717
messages := lo.Map(inappChatHistory, func(message *chatv2.Message, _ int) string {
1818
if _, ok := message.Payload.MessageType.(*chatv2.MessagePayload_Assistant); ok {
1919
return fmt.Sprintf("Assistant: %s", message.Payload.GetAssistant().GetContent())
@@ -29,7 +29,13 @@ func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistor
2929
message := strings.Join(messages, "\n")
3030
message = fmt.Sprintf("%s\nBased on above conversation, generate a short, clear, and descriptive title that summarizes the main topic or purpose of the discussion. The title should be concise, specific, and use natural language. Avoid vague or generic titles. Use abbreviation and short words if possible. Use 3-5 words if possible. Give me the title only, no other text including any other words.", message)
3131

32-
_, resp, err := a.ChatCompletionV2(ctx, "gpt-5-nano", OpenAIChatHistory{
32+
// Default model if user is not using their own
33+
modelToUse := "gpt-5-nano"
34+
if isCustomModel {
35+
modelToUse = modelSlug
36+
}
37+
38+
_, resp, err := a.ChatCompletionV2(ctx, modelToUse, isCustomModel, OpenAIChatHistory{
3339
openai.SystemMessage("You are a helpful assistant that generates a title for a conversation."),
3440
openai.UserMessage(message),
3541
}, llmProvider)

internal/services/toolkit/client/utils_v2.go

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,18 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2)
6666
"o1",
6767
"codex-mini-latest",
6868
}
69+
70+
// Gemini does not support Store param
71+
if strings.HasPrefix(strings.ToLower(modelSlug), "gemini") {
72+
return openaiv3.ChatCompletionNewParams{
73+
Model: modelSlug,
74+
Temperature: openaiv3.Float(0.7),
75+
MaxCompletionTokens: openaiv3.Int(4000),
76+
Tools: toolRegistry.GetTools(),
77+
ParallelToolCalls: openaiv3.Bool(true),
78+
}
79+
}
80+
6981
for _, model := range reasoningModels {
7082
if strings.Contains(modelSlug, model) {
7183
return openaiv3.ChatCompletionNewParams{

0 commit comments

Comments (0)