Skip to content

Commit 2e493f1

Browse files
committed
Optimize the "AI response exceeded max output length" message: remove the max-token value from the user-facing response, blocking sensitive configuration information from being exposed.
1 parent 8dfc1f3 commit 2e493f1

5 files changed

Lines changed: 10 additions & 10 deletions

File tree

src/Plugins/BotSharp.Plugin.AnthropicAI/Providers/ChatCompletionProvider.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
6767
{
6868
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {response.StopReason}, Agent: {agent.Name}, MaxOutputTokens: {parameters.MaxTokens}, Content:{text}");
6969

70-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {parameters.MaxTokens}")
70+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
7171
{
7272
CurrentAgentId = agent.Id,
7373
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -141,7 +141,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
141141
{
142142
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {response.StopReason}, Agent: {agent.Name}, MaxOutputTokens: {parameters.MaxTokens}, Content:{text}");
143143

144-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {parameters.MaxTokens}")
144+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
145145
{
146146
CurrentAgentId = agent.Id,
147147
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
8282
{
8383
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
8484

85-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
85+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
8686
{
8787
CurrentAgentId = agent.Id,
8888
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -218,7 +218,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
218218
{
219219
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
220220

221-
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
221+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
222222
{
223223
CurrentAgentId = agent.Id,
224224
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

src/Plugins/BotSharp.Plugin.DeepSeekAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7373
{
7474
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
7575

76-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
76+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
7777
{
7878
CurrentAgentId = agent.Id,
7979
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -185,7 +185,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
185185
{
186186
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
187187

188-
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
188+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
189189
{
190190
CurrentAgentId = agent.Id,
191191
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7676
{
7777
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {candidate.FinishReason}, Agent: {agent.Name}, MaxOutputTokens: {request.GenerationConfig?.MaxOutputTokens}, Content:{text}");
7878

79-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {request.GenerationConfig?.MaxOutputTokens}")
79+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
8080
{
8181
CurrentAgentId = agent.Id,
8282
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -185,7 +185,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogMode
185185
{
186186
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {candidate.FinishReason}, Agent: {agent.Name}, MaxOutputTokens: {messages.GenerationConfig?.MaxOutputTokens}, Content:{text}");
187187

188-
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {messages.GenerationConfig?.MaxOutputTokens}")
188+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
189189
{
190190
CurrentAgentId = agent.Id,
191191
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
7777
{
7878
_logger.LogWarning($"Action: {nameof(GetChatCompletions)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
7979

80-
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
80+
responseMessage = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
8181
{
8282
CurrentAgentId = agent.Id,
8383
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -192,7 +192,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
192192
{
193193
_logger.LogWarning($"Action: {nameof(GetChatCompletionsAsync)}, Reason: {reason}, Agent: {agent.Name}, MaxOutputTokens: {options.MaxOutputTokenCount}, Content:{text}");
194194

195-
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length {options.MaxOutputTokenCount}")
195+
msg = new RoleDialogModel(AgentRole.Assistant, $"AI response exceeded max output length")
196196
{
197197
CurrentAgentId = agent.Id,
198198
MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,

0 commit comments

Comments (0)