Skip to content

Commit 8dfc1f3

Browse files
committed
Improve handling when the AI response exceeds the max output length
1 parent 2d9f0df commit 8dfc1f3

5 files changed

Lines changed: 89 additions & 10 deletions

File tree

src/Plugins/BotSharp.Plugin.AnthropicAI/Providers/ChatCompletionProvider.cs

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
4848
var response = await client.Messages.GetClaudeMessageAsync(parameters);
4949

5050
RoleDialogModel responseMessage;
51+
var text = response.FirstMessage?.Text ?? string.Empty;
5152

5253
if (response.StopReason == StopReason.ToolUse)
5354
{
@@ -62,10 +63,20 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
6263
RenderedInstruction = string.Join("\r\n", renderedInstructions)
6364
};
6465
}
66+
else if (response.StopReason == StopReason.MaxTokens)
67+
{
68+
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {response.StopReason}, Agent: {agent.Name}, MaxOutputTokens: {parameters.MaxTokens}, Content:{text}");
69+
70+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {parameters.MaxTokens}")
71+
{
72+
CurrentAgentId = agent.Id,
73+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
74+
StopCompletion = true
75+
};
76+
}
6577
else
6678
{
67-
var message = response.FirstMessage;
68-
responseMessage = new RoleDialogModel(AgentRole.Assistant, message?.Text ?? string.Empty)
79+
responseMessage = new RoleDialogModel(AgentRole.Assistant, text)
6980
{
7081
CurrentAgentId = agent.Id,
7182
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -108,6 +119,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
108119
var response = await client.Messages.GetClaudeMessageAsync(parameters);
109120

110121
RoleDialogModel responseMessage;
122+
var text = response.FirstMessage?.Text ?? string.Empty;
111123

112124
if (response.StopReason == StopReason.ToolUse)
113125
{
@@ -125,10 +137,22 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
125137
// Execute functions
126138
await onFunctionExecuting(responseMessage);
127139
}
140+
else if (response.StopReason == StopReason.MaxTokens)
141+
{
142+
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {response.StopReason}, Agent: {agent.Name}, MaxOutputTokens: {parameters.MaxTokens}, Content:{text}");
143+
144+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {parameters.MaxTokens}")
145+
{
146+
CurrentAgentId = agent.Id,
147+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
148+
StopCompletion = true
149+
};
150+
151+
await onMessageReceived(responseMessage);
152+
}
128153
else
129154
{
130-
var message = response.FirstMessage;
131-
responseMessage = new RoleDialogModel(AgentRole.Assistant, message?.Text ?? string.Empty)
155+
responseMessage = new RoleDialogModel(AgentRole.Assistant, text)
132156
{
133157
CurrentAgentId = agent.Id,
134158
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
8080
}
8181
else if (reason == ChatFinishReason.Length)
8282
{
83-
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}");
83+
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
8484

8585
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
8686
{
@@ -214,6 +214,19 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
214214
// Execute functions
215215
await onFunctionExecuting(funcContextIn);
216216
}
217+
else if (reason == ChatFinishReason.Length)
218+
{
219+
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
220+
221+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
222+
{
223+
CurrentAgentId = agent.Id,
224+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
225+
StopCompletion = true,
226+
RenderedInstruction = string.Join("\r\n", renderedInstructions)
227+
};
228+
await onMessageReceived(msg);
229+
}
217230
else
218231
{
219232
// Text response received

src/Plugins/BotSharp.Plugin.DeepSeekAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7171
}
7272
else if (reason == ChatFinishReason.Length)
7373
{
74-
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}");
74+
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
7575

7676
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
7777
{
@@ -181,6 +181,19 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
181181
// Execute functions
182182
await onFunctionExecuting(funcContextIn);
183183
}
184+
else if (reason == ChatFinishReason.Length)
185+
{
186+
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
187+
188+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
189+
{
190+
CurrentAgentId = agent.Id,
191+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
192+
StopCompletion = true,
193+
RenderedInstruction = string.Join("\r\n", renderedInstructions)
194+
};
195+
await onMessageReceived(msg);
196+
}
184197
else
185198
{
186199
// Text response received

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
using BotSharp.Abstraction.Files;
22
using BotSharp.Abstraction.Files.Models;
3-
using BotSharp.Abstraction.Utilities;
43
using BotSharp.Abstraction.Files.Utilities;
54
using BotSharp.Abstraction.Hooks;
65
using BotSharp.Abstraction.MessageHub.Models;
@@ -75,9 +74,9 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7574
}
7675
else if (candidate?.FinishReason == FinishReason.MAX_TOKENS)
7776
{
78-
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {candidate.FinishReason}, Agent: {agent.Name}");
77+
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {candidate.FinishReason}, Agent: {agent.Name}, MaxOutputTokens: {request.GenerationConfig?.MaxOutputTokens}, Content:{text}");
7978

80-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
79+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {request.GenerationConfig?.MaxOutputTokens}")
8180
{
8281
CurrentAgentId = agent.Id,
8382
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -182,6 +181,23 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
182181
// Execute functions
183182
await onFunctionExecuting(funcContextIn);
184183
}
184+
else if (candidate?.FinishReason == FinishReason.MAX_TOKENS)
185+
{
186+
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {candidate.FinishReason}, Agent: {agent.Name}, MaxOutputTokens: {messages.GenerationConfig?.MaxOutputTokens}, Content:{text}");
187+
188+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {messages.GenerationConfig?.MaxOutputTokens}")
189+
{
190+
CurrentAgentId = agent.Id,
191+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
192+
StopCompletion = true,
193+
MetaData = new Dictionary<string, string?>
194+
{
195+
[Constants.ThoughtSignature] = part?.ThoughtSignature
196+
},
197+
RenderedInstruction = string.Join("\r\n", renderedInstructions)
198+
};
199+
await onMessageReceived(msg);
200+
}
185201
else
186202
{
187203
// Text response received

src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7575
}
7676
else if (reason == ChatFinishReason.Length)
7777
{
78-
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}");
78+
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
7979

8080
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
8181
{
@@ -188,6 +188,19 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
188188
// Execute functions
189189
await onFunctionExecuting(funcContextIn);
190190
}
191+
else if (reason == ChatFinishReason.Length)
192+
{
193+
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
194+
195+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
196+
{
197+
CurrentAgentId = agent.Id,
198+
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
199+
StopCompletion = true,
200+
RenderedInstruction = string.Join("\r\n", renderedInstructions)
201+
};
202+
await onMessageReceived(msg);
203+
}
191204
else
192205
{
193206
// Text response received

0 commit comments

Comments
 (0)