Commit ea8e1c8

Merge branch 'main' of https://github.com/use-the-fork/Flowise into Bug/rework-redis-connection

# Conflicts:
#   packages/components/package.json
#   packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx
#   packages/ui/src/views/apikey/index.jsx
#   packages/ui/src/views/assistants/AssistantDialog.jsx
#   packages/ui/src/views/chatflows/index.jsx
#   packages/ui/src/views/credentials/index.jsx

2 parents: 0f09faa + e0a0c83

38 files changed (+1987, -281 lines)

LICENSE.md (+17, -1)
@@ -1,7 +1,23 @@
-Apache License
+Apache License
 Version 2.0, January 2004
 http://www.apache.org/licenses/
 
+Flowise is governed by the Apache License 2.0, with additional terms and conditions outlined below:
+
+Flowise can be used for commercial purposes for "backend-as-a-service" for your applications or as a development platform for enterprises. However, under specific conditions, you must reach out to the project's administrators to secure a commercial license:
+
+a. Multi-tenant SaaS service: Unless you have explicit written authorization from Flowise, you may not utilize the Flowise source code to operate a multi-tenant SaaS service that closely resembles the Flowise cloud-based services.
+b. Logo and copyright information: While using Flowise in commercial application, you are prohibited from removing or altering the LOGO or copyright information displayed in the Flowise console and UI.
+
+For inquiries regarding licensing matters, please contact [email protected] via email.
+
+Contributors are required to consent to the following terms related to their contributed code:
+
+a. The project maintainers have the authority to modify the open-source agreement to be more stringent or lenient.
+b. Contributed code can be used for commercial purposes, including Flowise's cloud-based services.
+
+All other rights and restrictions are in accordance with the Apache License 2.0.
+
 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 
 1. Definitions.

packages/components/credentials/ElectricsearchUserPassword.credential.ts (+2, -2)
@@ -11,8 +11,8 @@ class ElasticSearchUserPassword implements INodeCredential {
         this.label = 'ElasticSearch User Password'
         this.name = 'elasticSearchUserPassword'
         this.version = 1.0
-        this.description =
-            'Refer to <a target="_blank" href="https://www.elastic.co/guide/en/kibana/current/tutorial-secure-access-to-kibana.html">official guide</a> on how to get User Password from ElasticSearch'
+        this.description = `Use Cloud ID field to enter your Elastic Cloud ID or the URL of the Elastic server instance.
+        Refer to <a target="_blank" href="https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html">official guide</a> on how to get User Password from ElasticSearch.`
         this.inputs = [
             {
                 label: 'Cloud ID',
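
Editor's note: the updated description says the Cloud ID field accepts either an Elastic Cloud ID or a plain server URL. A minimal sketch (not part of this commit) of how a consumer of this credential might build a client with the official @elastic/elasticsearch package; the names cloudIdOrUrl, username and password are illustrative assumptions:

    // Illustrative sketch only: map the credential fields onto the official Elasticsearch client.
    import { Client } from '@elastic/elasticsearch'

    const makeElasticClient = (cloudIdOrUrl: string, username: string, password: string): Client => {
        const auth = { username, password }
        // Cloud IDs are not URLs, so anything starting with http(s):// is treated as a node address.
        const isUrl = cloudIdOrUrl.startsWith('http://') || cloudIdOrUrl.startsWith('https://')
        return isUrl ? new Client({ node: cloudIdOrUrl, auth }) : new Client({ cloud: { id: cloudIdOrUrl }, auth })
    }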

packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts (+130, -13)
@@ -111,7 +111,7 @@ class OpenAIAssistant_Agents implements INode {
 
             const openai = new OpenAI({ apiKey: openAIApiKey })
             options.logger.info(`Clearing OpenAI Thread ${sessionId}`)
-            await openai.beta.threads.del(sessionId)
+            if (sessionId) await openai.beta.threads.del(sessionId)
             options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`)
         }
 

@@ -135,31 +135,71 @@
 
         const openai = new OpenAI({ apiKey: openAIApiKey })
 
-        // Retrieve assistant
         try {
             const assistantDetails = JSON.parse(assistant.details)
             const openAIAssistantId = assistantDetails.id
+
+            // Retrieve assistant
             const retrievedAssistant = await openai.beta.assistants.retrieve(openAIAssistantId)
 
             if (formattedTools.length) {
-                let filteredTools = uniqWith([...retrievedAssistant.tools, ...formattedTools], isEqual)
+                let filteredTools = []
+                for (const tool of retrievedAssistant.tools) {
+                    if (tool.type === 'code_interpreter' || tool.type === 'retrieval') filteredTools.push(tool)
+                }
+                filteredTools = uniqWith([...filteredTools, ...formattedTools], isEqual)
+                // filter out tool with empty function
                 filteredTools = filteredTools.filter((tool) => !(tool.type === 'function' && !(tool as any).function))
                 await openai.beta.assistants.update(openAIAssistantId, { tools: filteredTools })
+            } else {
+                let filteredTools = retrievedAssistant.tools.filter((tool) => tool.type !== 'function')
+                await openai.beta.assistants.update(openAIAssistantId, { tools: filteredTools })
             }
 
             const chatmessage = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({
                 chatId: options.chatId
             })
 
             let threadId = ''
+            let isNewThread = false
             if (!chatmessage) {
                 const thread = await openai.beta.threads.create({})
                 threadId = thread.id
+                isNewThread = true
             } else {
                 const thread = await openai.beta.threads.retrieve(chatmessage.sessionId)
                 threadId = thread.id
             }
 
+            // List all runs
+            if (!isNewThread) {
+                const promise = (threadId: string) => {
+                    return new Promise<void>((resolve) => {
+                        const timeout = setInterval(async () => {
+                            const allRuns = await openai.beta.threads.runs.list(threadId)
+                            if (allRuns.data && allRuns.data.length) {
+                                const firstRunId = allRuns.data[0].id
+                                const runStatus = allRuns.data.find((run) => run.id === firstRunId)?.status
+                                if (
+                                    runStatus &&
+                                    (runStatus === 'cancelled' ||
+                                        runStatus === 'completed' ||
+                                        runStatus === 'expired' ||
+                                        runStatus === 'failed')
+                                ) {
+                                    clearInterval(timeout)
+                                    resolve()
+                                }
+                            } else {
+                                clearInterval(timeout)
+                                resolve()
+                            }
+                        }, 500)
+                    })
+                }
+                await promise(threadId)
+            }
+
             // Add message to thread
             await openai.beta.threads.messages.create(threadId, {
                 role: 'user',
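
Editor's note: the added block above gates new user messages on an existing thread being idle, by polling the latest run every 500 ms until it reaches a terminal status. The same intent can be expressed as an async/await loop; this is only an illustrative sketch (sleep and waitForIdleThread are hypothetical helpers, openai is the same SDK client used in the hunk):

    // Illustrative sketch: wait until the most recent run on the thread is no longer active.
    const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms))

    const waitForIdleThread = async (openai: OpenAI, threadId: string): Promise<void> => {
        const terminalStatuses = ['cancelled', 'completed', 'expired', 'failed']
        for (;;) {
            const allRuns = await openai.beta.threads.runs.list(threadId)
            const latestRun = allRuns.data?.[0]
            if (!latestRun || terminalStatuses.includes(latestRun.status)) return
            await sleep(500) // same 500 ms cadence as the setInterval version above
        }
    }
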
@@ -217,48 +257,108 @@ class OpenAIAssistant_Agents implements INode {
                                     })
                                     resolve(state)
                                 } else {
-                                    reject(
-                                        new Error(
-                                            `Error processing thread: ${state}, Thread ID: ${threadId}, Run ID: ${runId}. submit_tool_outputs.tool_calls are empty`
-                                        )
-                                    )
+                                    await openai.beta.threads.runs.cancel(threadId, runId)
+                                    resolve('requires_action_retry')
                                 }
                             }
                         } else if (state === 'cancelled' || state === 'expired' || state === 'failed') {
                             clearInterval(timeout)
-                            reject(new Error(`Error processing thread: ${state}, Thread ID: ${threadId}, Run ID: ${runId}`))
+                            reject(
+                                new Error(`Error processing thread: ${state}, Thread ID: ${threadId}, Run ID: ${runId}, Status: ${state}`)
+                            )
                         }
                     }, 500)
                 })
             }
 
             // Polling run status
+            let runThreadId = runThread.id
             let state = await promise(threadId, runThread.id)
             while (state === 'requires_action') {
                 state = await promise(threadId, runThread.id)
             }
 
+            let retries = 3
+            while (state === 'requires_action_retry') {
+                if (retries > 0) {
+                    retries -= 1
+                    const newRunThread = await openai.beta.threads.runs.create(threadId, {
+                        assistant_id: retrievedAssistant.id
+                    })
+                    runThreadId = newRunThread.id
+                    state = await promise(threadId, newRunThread.id)
+                } else {
+                    throw new Error(`Error processing thread: ${state}, Thread ID: ${threadId}`)
+                }
+            }
+
             // List messages
             const messages = await openai.beta.threads.messages.list(threadId)
             const messageData = messages.data ?? []
             const assistantMessages = messageData.filter((msg) => msg.role === 'assistant')
             if (!assistantMessages.length) return ''
 
             let returnVal = ''
+            const fileAnnotations = []
             for (let i = 0; i < assistantMessages[0].content.length; i += 1) {
                 if (assistantMessages[0].content[i].type === 'text') {
                     const content = assistantMessages[0].content[i] as MessageContentText
-                    returnVal += content.text.value
 
-                    //TODO: handle annotations
+                    if (content.text.annotations) {
+                        const message_content = content.text
+                        const annotations = message_content.annotations
+
+                        const dirPath = path.join(getUserHome(), '.flowise', 'openai-assistant')
+
+                        // Iterate over the annotations and add footnotes
+                        for (let index = 0; index < annotations.length; index++) {
+                            const annotation = annotations[index]
+                            let filePath = ''
+
+                            // Gather citations based on annotation attributes
+                            const file_citation = (annotation as OpenAI.Beta.Threads.Messages.MessageContentText.Text.FileCitation)
+                                .file_citation
+                            if (file_citation) {
+                                const cited_file = await openai.files.retrieve(file_citation.file_id)
+                                // eslint-disable-next-line no-useless-escape
+                                const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
+                                filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName)
+                                await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
+                                fileAnnotations.push({
+                                    filePath,
+                                    fileName
+                                })
+                            } else {
+                                const file_path = (annotation as OpenAI.Beta.Threads.Messages.MessageContentText.Text.FilePath).file_path
+                                if (file_path) {
+                                    const cited_file = await openai.files.retrieve(file_path.file_id)
+                                    // eslint-disable-next-line no-useless-escape
+                                    const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
+                                    filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName)
+                                    await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
+                                    fileAnnotations.push({
+                                        filePath,
+                                        fileName
+                                    })
+                                }
+                            }
+
+                            // Replace the text with a footnote
+                            message_content.value = message_content.value.replace(`${annotation.text}`, `${filePath}`)
+                        }
+
+                        returnVal += message_content.value
+                    } else {
+                        returnVal += content.text.value
+                    }
                 } else {
                     const content = assistantMessages[0].content[i] as MessageContentImageFile
                     const fileId = content.image_file.file_id
                     const fileObj = await openai.files.retrieve(fileId)
                     const dirPath = path.join(getUserHome(), '.flowise', 'openai-assistant')
                     const filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', `${fileObj.filename}.png`)
 
-                    await downloadFile(fileObj, filePath, dirPath, openAIApiKey)
+                    await downloadImg(openai, fileId, filePath, dirPath)
 
                     const bitmap = fsDefault.readFileSync(filePath)
                     const base64String = Buffer.from(bitmap).toString('base64')
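
Editor's note: with this change the node's result also carries a fileAnnotations array of { filePath, fileName } entries for downloaded citation files, alongside the reply text. A purely illustrative sketch of how a caller might surface those entries (renderAnnotations and FileAnnotation are not part of Flowise):

    // Illustrative only: append the downloaded citation files to the reply text.
    type FileAnnotation = { filePath: string; fileName: string }

    const renderAnnotations = (text: string, fileAnnotations: FileAnnotation[]): string => {
        if (!fileAnnotations.length) return text
        const lines = fileAnnotations.map((f) => `- ${f.fileName} (saved to ${f.filePath})`)
        return `${text}\n\nReferenced files:\n${lines.join('\n')}`
    }
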
@@ -271,14 +371,31 @@ class OpenAIAssistant_Agents implements INode {
             return {
                 text: returnVal,
                 usedTools,
-                assistant: { assistantId: openAIAssistantId, threadId, runId: runThread.id, messages: messageData }
+                fileAnnotations,
+                assistant: { assistantId: openAIAssistantId, threadId, runId: runThreadId, messages: messageData }
             }
         } catch (error) {
             throw new Error(error)
         }
     }
 }
 
+const downloadImg = async (openai: OpenAI, fileId: string, filePath: string, dirPath: string) => {
+    const response = await openai.files.content(fileId)
+
+    // Extract the binary data from the Response object
+    const image_data = await response.arrayBuffer()
+
+    // Convert the binary data to a Buffer
+    const image_data_buffer = Buffer.from(image_data)
+
+    // Save the image to a specific location
+    if (!fsDefault.existsSync(dirPath)) {
+        fsDefault.mkdirSync(path.dirname(filePath), { recursive: true })
+    }
+    fsDefault.writeFileSync(filePath, image_data_buffer)
+}
+
 const downloadFile = async (fileObj: any, filePath: string, dirPath: string, openAIApiKey: string) => {
     try {
         const response = await fetch(`https://api.openai.com/v1/files/${fileObj.id}/content`, {
