Commit 0e7b6ed

feat(workflow): enhance workflow execution, node updates, and chat integration
- Streamline workflow node execution by consolidating input handling and fixing LLM node execution issues
- Optimize node updates using `unstable_batchedUpdates` to prevent re-renders and cursor jumps (a sketch of this pattern follows below)
- Address CLI node execution and input handling issues
- Fix file link support in chat messages for both the standard and experimental UI
- Bump the VS Code extension version to 1.74.0+0
1 parent 73d155b commit 0e7b6ed
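
Note: the node-update batching called out in the commit message is in a file whose hunks are not included in this view (only five of the six changed files are shown below). A minimal sketch of the general pattern, assuming a React-based node editor; the NodeData shape and setter names are illustrative, not taken from this commit:

import { unstable_batchedUpdates } from 'react-dom'

interface NodeData {
    id: string
    content: string
}

// Flush both state setters in a single render pass so that editing a node's
// text field does not trigger intermediate re-renders that reset the cursor.
function applyNodeEdit(
    setNodes: (updater: (nodes: NodeData[]) => NodeData[]) => void,
    setDirty: (dirty: boolean) => void,
    updated: NodeData
): void {
    unstable_batchedUpdates(() => {
        setNodes(nodes => nodes.map(n => (n.id === updated.id ? { ...n, ...updated } : n)))
        setDirty(true)
    })
}

Outside React event handlers (for example, in a listener for messages from the extension host), React 17-style rendering applies each state update immediately, so grouping the setters this way is what avoids the cursor jumps mentioned above.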

File tree: 6 files changed (+124 −151 lines)


vscode/package.json (+1 −1)

@@ -3,7 +3,7 @@
   "name": "cody-ai",
   "private": true,
   "displayName": "Cody: AI Code Assistant",
-  "version": "1.71.3+0",
+  "version": "1.74.0+0",
   "publisher": "sourcegraph",
   "license": "Apache-2.0",
   "icon": "resources/sourcegraph.png",

vscode/src/chat/chat-view/ChatController.ts (+16 −7)

@@ -113,6 +113,7 @@ import {
     handleCopiedCode,
     handleSmartApply,
 } from '../../services/utils/codeblock-action-tracker'
+import { resolveRelativeOrAbsoluteUri } from '../../services/utils/edit-create-file'
 import { openExternalLinks } from '../../services/utils/workspace-action'
 import { TestSupport } from '../../test-support'
 import type { MessageErrorType } from '../MessageProvider'
@@ -350,13 +351,21 @@ export class ChatController implements vscode.Disposable, vscode.WebviewViewProvider
                 break
             }
             case 'openFileLink':
-                vscode.commands.executeCommand('vscode.open', message.uri, {
-                    selection: message.range,
-                    preserveFocus: true,
-                    background: false,
-                    preview: true,
-                    viewColumn: vscode.ViewColumn.Beside,
-                })
+                {
+                    const workspaceUri = vscode.workspace.workspaceFolders?.[0].uri
+                    const uri = await resolveRelativeOrAbsoluteUri(
+                        workspaceUri,
+                        message.uri.path,
+                        message.uri
+                    )
+                    vscode.commands.executeCommand('vscode.open', uri, {
+                        selection: message.range,
+                        preserveFocus: true,
+                        background: false,
+                        preview: true,
+                        viewColumn: vscode.ViewColumn.One,
+                    })
+                }
                 break
             case 'openRemoteFile':
                 this.openRemoteFile(message.uri, message.tryLocal ?? false)

vscode/src/workflow/workflow-executor.ts (+46 −68)

@@ -1,6 +1,5 @@
 import * as os from 'node:os'
 import * as path from 'node:path'
-import type { CLINode } from '@/workflow/components/nodes/CLI_Node'
 import type { LLMNode } from '@/workflow/components/nodes/LLM_Node'
 import type { LoopStartNode } from '@/workflow/components/nodes/LoopStart_Node'
 import type { SearchContextNode } from '@/workflow/components/nodes/SearchContext_Node'
@@ -138,18 +137,8 @@
         switch (node.type) {
             case NodeType.CLI: {
                 try {
-                    const inputs = combineParentOutputsByConnectionOrder(node.id, context)
-                    /*.map(
-                        output => sanitizeForShell(output)
-                    )*/
-                    const command = (node as CLINode).data.content
-                        ? replaceIndexedInputs((node as CLINode).data.content, inputs, context)
-                        : ''
                     result = await executeCLINode(
-                        {
-                            ...(node as CLINode),
-                            data: { ...(node as CLINode).data, content: command },
-                        },
+                        node,
                         abortSignal,
                         persistentShell,
                         webview,
@@ -176,50 +165,26 @@
                 break
             }
             case NodeType.LLM: {
-                const inputs = combineParentOutputsByConnectionOrder(node.id, context).map(input =>
-                    sanitizeForPrompt(input)
-                )
-                const prompt = node.data.content
-                    ? replaceIndexedInputs(node.data.content, inputs, context)
-                    : ''
-
-                const oldTemperature = await chatClient.getTemperature()
-                await chatClient.setTemperature((node as LLMNode).data.temperature)
-                result = await executeLLMNode(
-                    { ...node, data: { ...node.data, content: prompt } },
-                    chatClient,
-                    abortSignal
-                )
-                await chatClient.setTemperature(oldTemperature)
+                try {
+                    result = await executeLLMNode(node, chatClient, abortSignal, context)
+                } catch (error) {
+                    console.error('Error in LLM Node:', error)
+                    throw error
+                }
                 break
             }
             case NodeType.PREVIEW: {
-                const inputs = combineParentOutputsByConnectionOrder(node.id, context)
-                result = await executePreviewNode(inputs.join('\n'), node.id, webview, context)
+                result = await executePreviewNode(node.id, webview, context)
                 break
             }

             case NodeType.INPUT: {
-                const inputs = combineParentOutputsByConnectionOrder(node.id, context)
-                const text = node.data.content
-                    ? replaceIndexedInputs(node.data.content, inputs, context)
-                    : ''
-                result = await executeInputNode(text)
+                result = await executeInputNode(node, context)
                 break
             }

             case NodeType.SEARCH_CONTEXT: {
-                const inputs = combineParentOutputsByConnectionOrder(node.id, context)
-                const text = node.data.content
-                    ? replaceIndexedInputs(node.data.content, inputs, context)
-                    : ''
-                const allowRemoteContext = (node as SearchContextNode).data.local_remote
-                result = await executeSearchContextNode(
-                    text,
-                    contextRetriever,
-                    abortSignal,
-                    allowRemoteContext
-                )
+                result = await executeSearchContextNode(node, contextRetriever, abortSignal, context)
                 break
             }
             case NodeType.CODY_OUTPUT: {
@@ -276,8 +241,7 @@ export async function executeWorkflow(
                 break
             }
             case NodeType.LOOP_END: {
-                const inputs = combineParentOutputsByConnectionOrder(node.id, context)
-                result = await executePreviewNode(inputs.join('\n'), node.id, webview, context)
+                result = await executePreviewNode(node.id, webview, context)
                 break
            }

@@ -459,13 +423,13 @@ export function replaceIndexedInputs(
 */
 export function combineParentOutputsByConnectionOrder(
     nodeId: string,
-    context: IndexedExecutionContext
+    context?: IndexedExecutionContext
 ): string[] {
-    const parentEdges = context.edgeIndex.byTarget.get(nodeId) || []
+    const parentEdges = context?.edgeIndex.byTarget.get(nodeId) || []

     return parentEdges
         .map(edge => {
-            let output = context.nodeOutputs.get(edge.source)
+            let output = context?.nodeOutputs.get(edge.source)
             if (Array.isArray(output)) {
                 output = output.join('\n')
             }
@@ -504,15 +468,14 @@ export async function executeCLINode(
     if (!vscode.env.shell || !vscode.workspace.isTrusted) {
         throw new Error('Shell command is not supported in your current workspace.')
     }
-    // Add validation for empty commands
-    if (!node.data.content?.trim()) {
+    const inputs = combineParentOutputsByConnectionOrder(node.id, context)
+    const command = node.data.content ? replaceIndexedInputs(node.data.content, inputs, context) : ''
+    if (!command.trim()) {
         throw new Error('CLI Node requires a non-empty command')
     }

     const homeDir = os.homedir() || process.env.HOME || process.env.USERPROFILE || ''
-
-    let filteredCommand =
-        (node as CLINode).data.content?.replaceAll(/(\s~\/)/g, ` ${homeDir}${path.sep}`) || ''
+    let filteredCommand = command.replaceAll(/(\s~\/)/g, ` ${homeDir}${path.sep}`) || ''

     if (node.data.needsUserApproval) {
         await webview.postMessage({
@@ -536,7 +499,7 @@

     try {
         const { output, exitCode } = await persistentShell.execute(filteredCommand, abortSignal)
-        if (exitCode !== '0' && (node as CLINode).data.shouldAbort) {
+        if (exitCode !== '0' && node.data.shouldAbort) {
             throw new Error(output)
         }
         context?.cliMetadata?.set(node.id, { exitCode: exitCode })
@@ -568,10 +531,18 @@
 async function executeLLMNode(
     node: WorkflowNodes,
     chatClient: ChatClient,
-    abortSignal?: AbortSignal
+    abortSignal?: AbortSignal,
+    context?: IndexedExecutionContext
 ): Promise<string> {
     abortSignal?.throwIfAborted()
-    if (!node.data.content) {
+    const oldTemperature = await chatClient.getTemperature()
+    await chatClient.setTemperature((node as LLMNode).data.temperature)
+
+    const inputs = combineParentOutputsByConnectionOrder(node.id, context).map(input =>
+        sanitizeForPrompt(input)
+    )
+    const prompt = node.data.content ? replaceIndexedInputs(node.data.content, inputs, context) : ''
+    if (!prompt || prompt.trim() === '') {
         throw new Error(`No prompt specified for LLM node ${node.id} with ${node.data.title}`)
     }

@@ -589,7 +560,7 @@ async function executeLLMNode(
         ...preamble,
         {
             speaker: 'human',
-            text: PromptString.unsafe_fromUserQuery(node.data.content),
+            text: PromptString.unsafe_fromUserQuery(prompt),
         },
     ]

@@ -628,14 +599,16 @@
                         }
                     }
                 } catch (error) {
+                    await chatClient.setTemperature(oldTemperature)
                     reject(error)
                 }
             })
             .catch(reject)
         })
-
+        await chatClient.setTemperature(oldTemperature)
         return await Promise.race([streamPromise, timeout])
     } catch (error) {
+        await chatClient.setTemperature(oldTemperature)
        if (error instanceof Error) {
            if (error.name === 'AbortError') {
                throw new Error('Workflow execution aborted')
@@ -658,11 +631,11 @@
 * @returns The trimmed input string.
 */
 async function executePreviewNode(
-    input: string,
     nodeId: string,
     webview: vscode.Webview,
     context: IndexedExecutionContext
 ): Promise<string> {
+    const input = combineParentOutputsByConnectionOrder(nodeId, context).join('\n')
     const processedInput = replaceIndexedInputs(input, [], context)
     const trimmedInput = processedInput.trim()
     const tokenCount = await TokenCounterUtils.encode(trimmedInput)
@@ -686,8 +659,10 @@
 * @param input - The input string to be processed.
 * @returns The trimmed input string.
 */
-async function executeInputNode(input: string): Promise<string> {
-    return input.trim()
+async function executeInputNode(node: WorkflowNode, context: IndexedExecutionContext): Promise<string> {
+    const inputs = combineParentOutputsByConnectionOrder(node.id, context)
+    const text = node.data.content ? replaceIndexedInputs(node.data.content, inputs, context) : ''
+    return text.trim()
 }

 // #region 4 Search Context Node Execution */
@@ -700,12 +675,15 @@ async function executeInputNode(input: string): Promise<string> {
 * @returns An array of strings, where each string represents a formatted context item (path + newline + content).
 */
 async function executeSearchContextNode(
-    input: string,
+    node: WorkflowNode,
     contextRetriever: Pick<ContextRetriever, 'retrieveContext'>,
     abortSignal: AbortSignal,
-    allowRemoteContext: boolean
+    context: IndexedExecutionContext
 ): Promise<string> {
     abortSignal.throwIfAborted()
+    const inputs = combineParentOutputsByConnectionOrder(node.id, context)
+    const text = node.data.content ? replaceIndexedInputs(node.data.content, inputs, context) : ''
+    const allowRemoteContext = (node as SearchContextNode).data.local_remote
     const corpusItems = await firstValueFrom(getCorpusContextItemsForEditorState(allowRemoteContext))
     if (corpusItems === pendingOperation || corpusItems.length === 0) {
         return ''
@@ -715,15 +693,15 @@ async function executeSearchContextNode(
         return ''
     }
     const span = tracer.startSpan('chat.submit')
-    const context = await contextRetriever.retrieveContext(
+    const fetchedContext = await contextRetriever.retrieveContext(
         toStructuredMentions(corpusItems),
-        PromptString.unsafe_fromLLMResponse(input),
+        PromptString.unsafe_fromLLMResponse(text),
         span,
         abortSignal,
         false
     )
     span.end()
-    const result = context.map(item => {
+    const result = fetchedContext.map(item => {
         // Format each context item as path + newline + content
         return `${item.uri.path}\n${item.content || ''}`
     })
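
The recurring change in this file is that each node type now resolves its own inputs inside its execute* function rather than in the dispatching switch. A minimal sketch of that shared pattern, with simplified stand-in types and declared (not implemented) helpers standing in for the real ones in workflow-executor.ts:

interface SketchNode {
    id: string
    data: { content?: string }
}
type SketchContext = unknown

// Stand-ins for the helpers defined in this file.
declare function combineParentOutputsByConnectionOrder(nodeId: string, context?: SketchContext): string[]
declare function replaceIndexedInputs(template: string, inputs: string[], context?: SketchContext): string

// Each execute*Node now begins roughly like this: gather the outputs of parent
// nodes in connection order, then substitute them into the node's own content.
function resolveNodeInput(node: SketchNode, context?: SketchContext): string {
    const inputs = combineParentOutputsByConnectionOrder(node.id, context)
    return node.data.content ? replaceIndexedInputs(node.data.content, inputs, context) : ''
}

Moving this step inside each executor also lets executeLLMNode restore the previous temperature on every exit path, which the error-handling changes above rely on.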

vscode/webviews/chat/ChatMessageContent/ChatMessageContent.tsx (+2 −20)

@@ -1,23 +1,19 @@
+import type { Guardrails, PromptString } from '@sourcegraph/cody-shared'
 import { clsx } from 'clsx'
 import { LRUCache } from 'lru-cache'
 import { LoaderIcon, MinusIcon, PlusIcon } from 'lucide-react'
 import type React from 'react'
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import { URI } from 'vscode-uri'
-
-import type { Guardrails, PromptString } from '@sourcegraph/cody-shared'
-
 import type { FixupTaskID } from '../../../src/non-stop/FixupTask'
 import { CodyTaskState } from '../../../src/non-stop/state'
 import { type ClientActionListener, useClientActionListener } from '../../client/clientState'
 import { MarkdownFromCody } from '../../components/MarkdownFromCody'
 import { useLocalStorage } from '../../components/hooks'
-import { getVSCodeAPI } from '../../utils/VSCodeApi'
 import { useConfig } from '../../utils/useConfig'
 import type { PriorHumanMessageInfo } from '../cells/messageCell/assistant/AssistantMessageCell'
 import styles from './ChatMessageContent.module.css'
 import { createButtons, createButtonsExperimentalUI } from './create-buttons'
-import { extractThinkContent, getCodeBlockId, getFileName } from './utils'
+import { extractThinkContent, getCodeBlockId } from './utils'

 export interface CodeBlockActionsProps {
     copyButtonOnSubmit: (text: string, event?: 'Keydown' | 'Button') => void
@@ -206,20 +202,6 @@ export const ChatMessageContent: React.FunctionComponent<ChatMessageContentProps
                 } else {
                     parent.appendChild(actionsContainer)
                 }
-                if (fileName) {
-                    const fileNameContainer = document.createElement('div')
-                    fileNameContainer.className = clsx(styles.fileNameContainer, styles.clickable)
-                    fileNameContainer.textContent = getFileName(fileName)
-                    fileNameContainer.title = fileName
-                    fileNameContainer.addEventListener('click', () => {
-                        // Using the existing vscode.workspace.openTextDocument API
-                        getVSCodeAPI().postMessage({
-                            command: 'openFileLink',
-                            uri: URI.file(fileName),
-                        })
-                    })
-                    parent.append(fileNameContainer)
-                }
             }
         }
     }, [

vscode/webviews/chat/ChatMessageContent/create-buttons.ts (+8)

@@ -1,4 +1,5 @@
 import { type Guardrails, isError } from '@sourcegraph/cody-shared'
+import { URI } from 'vscode-uri'
 import type { FixupTaskID } from '../../../src/non-stop/FixupTask'
 import { CodyTaskState } from '../../../src/non-stop/state'
 import {
@@ -197,6 +198,13 @@ export function createButtonsExperimentalUI(
         fileNameContainer.className = styles.fileNameContainer
         fileNameContainer.textContent = getFileName(codeBlockName)
         fileNameContainer.title = codeBlockName
+        fileNameContainer.addEventListener('click', () => {
+            // Using the existing vscode.workspace.openTextDocument API
+            getVSCodeAPI().postMessage({
+                command: 'openFileLink',
+                uri: URI.file(codeBlockName),
+            })
+        })
         metadataContainer.append(fileNameContainer)
     }

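
Taken together with the ChatController.ts change above: the webview now posts an 'openFileLink' message built with URI.file(codeBlockName), where codeBlockName may be a workspace-relative path, which appears to be why the controller resolves the path against the workspace root before opening it. A condensed sketch of that extension-side handling; the import path and call shape are taken from the diff above, while the standalone function itself is illustrative:

import * as vscode from 'vscode'
import { resolveRelativeOrAbsoluteUri } from '../../services/utils/edit-create-file'

// Resolve a possibly workspace-relative file URI before opening it in the editor.
async function openFileLink(uri: vscode.Uri, range?: vscode.Range): Promise<void> {
    const workspaceUri = vscode.workspace.workspaceFolders?.[0]?.uri
    const resolved = await resolveRelativeOrAbsoluteUri(workspaceUri, uri.path, uri)
    await vscode.commands.executeCommand('vscode.open', resolved, {
        selection: range,
        preserveFocus: true,
        preview: true,
        viewColumn: vscode.ViewColumn.One,
    })
}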
