Skip to content

Commit 3bb8fd4

Browse files
committed
feat: markdown rendering on complete response (no raw streaming display)
1 parent e2dad8f commit 3bb8fd4

1 file changed

Lines changed: 6 additions & 19 deletions

File tree

src/index.ts

Lines changed: 6 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -111,32 +111,20 @@ async function main() {
111111
// Create LLM provider
112112
const llm = createOpenAIProvider(config.llm);
113113

114-
// Streaming state
115-
let streamingStarted = false;
116-
117114
// Create agent
118115
const agent = new Agent({
119116
llm,
120117
workspaceDir,
121118
callbacks: {
122119
onAssistantText: (text) => {
123120
if (text.trim() === "HEARTBEAT_OK") return;
124-
if (streamingStarted) {
125-
// Text was already streamed, just finish
126-
process.stdout.write("\n\n");
127-
} else {
128-
// Non-streamed response: render markdown
129-
const rendered = renderMarkdown(text);
130-
console.log(chalk.green("\n🦐 ") + rendered);
131-
}
132-
streamingStarted = false;
121+
// Render complete response with markdown formatting
122+
const rendered = renderMarkdown(text);
123+
console.log(chalk.green("\n🦐 ") + rendered);
133124
},
134-
onTextChunk: (chunk) => {
135-
if (!streamingStarted) {
136-
process.stdout.write(chalk.green("\n🦐 "));
137-
streamingStarted = true;
138-
}
139-
process.stdout.write(chunk);
125+
onTextChunk: () => {
126+
// Streaming runs under the hood but we wait for the complete text
127+
// to render markdown properly
140128
},
141129
onToolCall: (name, args) => {
142130
console.log(
@@ -262,7 +250,6 @@ async function main() {
262250
});
263251

264252
async function handleMessage(text: string): Promise<void> {
265-
streamingStarted = false;
266253
console.log(chalk.dim("⏳ Thinking..."));
267254
try {
268255
await agent.chat(text);

0 commit comments

Comments (0)