Skip to content

Commit 81590ad

Browse files
authored
Merge pull request #111 from FlowTestAI/make-openai-impl-robust
feat: make OpenAI produce more accurate results
2 parents 5980f03 + a45f91a commit 81590ad

2 files changed

Lines changed: 47 additions & 44 deletions

File tree

packages/flowtest-electron/src/utils/flowtestai.js

Lines changed: 42 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ const { OpenAIEmbeddings } = require('@langchain/openai');
77

88
const SYSTEM_MESSAGE = `You are a helpful assistant. \
99
Respond to the following prompt by using function_call and then summarize actions. \
10-
Ask for clarification if a user request is ambiguous.`;
10+
If a user request is ambiguous, choose the best response possible.`;
1111

1212
// Maximum number of function calls allowed to prevent infinite or lengthy loops
1313
const MAX_CALLS = 10;
@@ -62,33 +62,29 @@ class FlowtestAI {
6262
};
6363
}
6464

65-
const f = { name: function_name, description: desc, parameters: schema };
66-
// ignore functions with circular dependency
67-
if (!this.isCyclic(f)) {
65+
const f = {
66+
type: 'function',
67+
function: { name: function_name, description: desc, parameters: schema },
68+
};
69+
70+
if (this.isCyclic(f)) {
71+
functions.push({
72+
type: 'function',
73+
function: { name: function_name, description: desc, parameters: {} },
74+
});
75+
} else {
6876
functions.push(f);
6977
}
7078
});
7179
});
72-
// console.log(JSON.stringify(functions));
80+
7381
return functions;
7482
}
7583

7684
async filter_functions(functions, instruction, apiKey) {
77-
const chunkSize = 32;
78-
const chunks = [];
79-
80-
for (let i = 0; i < functions.length; i += chunkSize) {
81-
const chunk = functions.slice(i, i + chunkSize);
82-
chunks.push(chunk);
83-
}
84-
85-
const documents = chunks.map((chunk) => {
86-
return JSON.stringify(
87-
chunk.map((f) => {
88-
const { parameters, ...fDescription } = f;
89-
return fDescription;
90-
}),
91-
);
85+
const documents = functions.map((f) => {
86+
const { parameters, ...fDescription } = f.function;
87+
return JSON.stringify(fDescription);
9288
});
9389

9490
const vectorStore = await MemoryVectorStore.fromTexts(
@@ -99,17 +95,17 @@ class FlowtestAI {
9995
}),
10096
);
10197

102-
// 32 x 4 = 128 (max no of functions accepted by openAI function calling)
103-
const retrievedDocuments = await vectorStore.similaritySearch(instruction, 4);
98+
// 128 (max no of functions accepted by openAI function calling)
99+
const retrievedDocuments = await vectorStore.similaritySearch(instruction, 10);
104100
var selectedFunctions = [];
105101
retrievedDocuments.forEach((document) => {
106102
const pDocument = JSON.parse(document.pageContent);
107-
pDocument.forEach((outputF) => {
108-
const findF = functions.find((f) => f.name === outputF.name && f.description === outputF.description);
109-
if (findF) {
110-
selectedFunctions = selectedFunctions.concat(findF);
111-
}
112-
});
103+
const findF = functions.find(
104+
(f) => f.function.name === pDocument.name && f.function.description === pDocument.description,
105+
);
106+
if (findF) {
107+
selectedFunctions = selectedFunctions.concat(findF);
108+
}
113109
});
114110

115111
return selectedFunctions;
@@ -121,15 +117,16 @@ class FlowtestAI {
121117
});
122118

123119
return await openai.chat.completions.create({
124-
model: 'gpt-3.5-turbo-16k-0613',
125-
functions: functions,
126-
function_call: 'auto', // "auto" means the model can pick between generating a message or calling a function.
120+
model: 'gpt-4', //gpt-3.5-turbo-16k-0613
121+
tools: functions,
122+
tool_choice: 'auto', // "auto" means the model can pick between generating a message or calling a function.
127123
temperature: 0,
128124
messages: messages,
129125
});
130126
}
131127

132128
async process_user_instruction(functions, instruction, apiKey) {
129+
//console.log(functions.map((f) => f.function.name));
133130
let result = [];
134131
let num_calls = 0;
135132
const messages = [
@@ -139,23 +136,24 @@ class FlowtestAI {
139136

140137
while (num_calls < MAX_CALLS) {
141138
const response = await this.get_openai_response(functions, messages, apiKey);
142-
// console.log(response)
143139
const message = response['choices'][0]['message'];
144140

145-
if (message['function_call']) {
146-
console.log('Function call #: ', num_calls + 1);
147-
console.log(message['function_call']);
141+
if (message.tool_calls) {
148142
messages.push(message);
149-
150-
// We'll simply add a message to simulate successful function call.
151-
messages.push({
152-
role: 'function',
153-
content: 'success',
154-
name: message['function_call']['name'],
143+
message.tool_calls.map((tool_call) => {
144+
console.log('Function call #: ', num_calls + 1);
145+
console.log(JSON.stringify(tool_call));
146+
147+
// We'll simply add a message to simulate successful function call.
148+
messages.push({
149+
role: 'tool',
150+
content: 'success',
151+
tool_call_id: tool_call.id,
152+
});
153+
result.push(tool_call.function);
154+
155+
num_calls += 1;
155156
});
156-
result.push(message['function_call']);
157-
158-
num_calls += 1;
159157
} else {
160158
console.log('Message: ');
161159
console.log(message['content']);

packages/flowtest-electron/src/utils/generate-request-body.js

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,11 @@ const generateObjectExample = (schema, level, context) => {
6565
example[key] = generateRequestBodyExample(propertySchema, level, context);
6666
}
6767

68+
if (schema.additionalProperties) {
69+
example.additionalProperty1 = generateRequestBodyExample(schema.additionalProperties, level, context);
70+
example.additionalProperty2 = generateRequestBodyExample(schema.additionalProperties, level, context);
71+
}
72+
6873
context.processedSchemas.delete(schema);
6974
return example;
7075
};

0 commit comments

Comments
 (0)