diff --git a/components/ui/credential-selector.tsx b/components/ui/credential-selector.tsx
new file mode 100644
index 0000000..a9caab4
--- /dev/null
+++ b/components/ui/credential-selector.tsx
@@ -0,0 +1,320 @@
+"use client"
+
+import { useState, useEffect, useCallback } from 'react'
+import { Plus, Key, Database, Mail, Globe, Trash2, Edit3 } from 'lucide-react'
+import { Button } from './button'
+import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from './dialog'
+import { MobileSheet } from './mobile-sheet'
+import { Input } from './input'
+import { Label } from './label'
+import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from './select'
+import { credentialStore, type StoredCredential } from '@/lib/credential-store'
+import { SecurityBadge, SecurityWarning } from './security-status'
+import { SECURITY_WARNINGS } from '@/lib/security'
+import { CredentialType } from '@/types/credentials'
+
+interface CredentialSelectorProps {
+ value: string
+ onChange: (credentialId: string) => void
+ credentialType?: CredentialType
+ placeholder?: string
+ className?: string
+}
+
+interface NewCredentialDialogProps {
+ open: boolean
+ onOpenChange: (open: boolean) => void
+ credentialType: CredentialType
+ onCredentialCreated: (credentialId: string) => void
+}
+
+function NewCredentialDialog({ open, onOpenChange, credentialType, onCredentialCreated }: NewCredentialDialogProps) {
+ const [name, setName] = useState('')
+ const [value, setValue] = useState('')
+ const [description, setDescription] = useState('')
+ const [loading, setLoading] = useState(false)
+ const [isMobile, setIsMobile] = useState(false)
+
+ // Detect screen size
+ useEffect(() => {
+ const checkScreenSize = () => {
+ setIsMobile(window.innerWidth < 640) // 640px is the 'sm' breakpoint
+ }
+
+ checkScreenSize()
+ window.addEventListener('resize', checkScreenSize)
+ return () => window.removeEventListener('resize', checkScreenSize)
+ }, [])
+
+ const handleCreate = async () => {
+ if (!name.trim() || !value.trim()) return
+
+ setLoading(true)
+ try {
+ const credentialId = credentialStore.storeCredential(
+ name.trim(),
+ value.trim(),
+ credentialType,
+ description.trim() || undefined
+ )
+
+ onCredentialCreated(credentialId)
+ onOpenChange(false)
+
+ // Reset form
+ setName('')
+ setValue('')
+ setDescription('')
+ } catch (error) {
+ console.error('Failed to create credential:', error)
+ alert('Failed to create credential. Please try again.')
+ } finally {
+ setLoading(false)
+ }
+ }
+
+ const getPlaceholder = () => {
+ switch (credentialType) {
+ case 'database':
+ return 'postgresql://user:password@localhost:5432/dbname'
+ case 'api':
+ return 'sk-1234567890abcdef...'
+ case 'email':
+ return 'your-app-password'
+ default:
+ return 'Your secret value'
+ }
+ }
+
+ const getIcon = () => {
+ switch (credentialType) {
+ case 'database':
+ return
+ case 'api':
+ return
+ case 'email':
+ return
+ default:
+ return
+ }
+ }
+
+ const getTypeLabel = () => {
+ switch (credentialType) {
+ case 'database':
+ return 'Database'
+ case 'api':
+ return 'API'
+ case 'email':
+ return 'Email'
+ default:
+ return 'Generic'
+ }
+ }
+
+ const renderContent = () => (
+
+
+
+
+
+ setName(e.target.value)}
+ placeholder={`My ${getTypeLabel()} Connection`}
+ className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300 h-9"
+ />
+
+
+
+
+ setValue(e.target.value)}
+ placeholder={getPlaceholder()}
+ className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300 h-9"
+ />
+
+
+
+
+ setDescription(e.target.value)}
+ placeholder="Brief description of this credential"
+ className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300 h-9"
+ />
+
+
+
+
+
+
+
+
+
+
+ )
+
+ return (
+ <>
+ {/* Mobile Sheet */}
+ {isMobile && (
+
+ {renderContent()}
+
+ )}
+
+ {/* Desktop Dialog */}
+ {!isMobile && (
+
+ )}
+ >
+ )
+}
+
+export function CredentialSelector({
+ value,
+ onChange,
+ credentialType = 'generic',
+ placeholder = "Select a credential",
+ className = ""
+}: CredentialSelectorProps) {
+ const [credentials, setCredentials] = useState[]>([])
+ const [showNewDialog, setShowNewDialog] = useState(false)
+ const [mounted, setMounted] = useState(false)
+
+ const loadCredentials = useCallback(() => {
+ const creds = credentialStore.getCredentialsByType(credentialType)
+ setCredentials(creds)
+ }, [credentialType])
+
+ useEffect(() => {
+ setMounted(true)
+ loadCredentials()
+ }, [credentialType, loadCredentials])
+
+ const handleCredentialCreated = (credentialId: string) => {
+ onChange(credentialId)
+ loadCredentials() // Refresh the list
+ }
+
+ const getIcon = () => {
+ switch (credentialType) {
+ case 'database':
+ return
+ case 'api':
+ return
+ case 'email':
+ return
+ default:
+ return
+ }
+ }
+
+ if (!mounted) {
+ return null // Avoid hydration issues
+ }
+
+ const selectedCredential = credentials.find(c => c.id === value)
+
+ return (
+
+
+
+
+
+
+
+
+
+ )
+}
diff --git a/components/ui/dialog.tsx b/components/ui/dialog.tsx
index 7326ef3..8a9c02e 100644
--- a/components/ui/dialog.tsx
+++ b/components/ui/dialog.tsx
@@ -121,7 +121,11 @@ DialogDescription.displayName = DialogPrimitive.Description.displayName
export {
Dialog,
DialogContent,
+ DialogDescription,
DialogHeader,
DialogFooter,
DialogTitle,
}
+
+// Also export DialogTrigger from Radix for external use
+export const DialogTrigger = DialogPrimitive.Trigger
diff --git a/components/workflow/node-config-panel.tsx b/components/workflow/node-config-panel.tsx
index 76d76b5..afce835 100644
--- a/components/workflow/node-config-panel.tsx
+++ b/components/workflow/node-config-panel.tsx
@@ -6,15 +6,35 @@ import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select'
+import { CredentialSelector } from '@/components/ui/credential-selector'
import { useWorkflowStore } from '@/hooks/use-workflow-store'
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogFooter } from '@/components/ui/dialog'
import { MobileSheet } from '@/components/ui/mobile-sheet'
import { useToast } from '@/components/ui/toaster'
import { WorkflowNode, NodeType, ActionType, TriggerType, HttpNodeConfig, ScheduleNodeConfig } from '@/types/workflow'
import { EMAIL_NODE_DEFINITION, EmailNodeConfig } from '@/nodes/EmailNode'
+import { CredentialType, toCredentialType } from '@/types/credentials'
import { WebhookNodeConfig } from '@/nodes/WebhookNode'
import { findNodeDefinition } from '@/lib/node-definitions'
import { SECURITY_WARNINGS, getSecurityStatus } from '@/lib/security'
+import { getArrayValue, getObjectValue, pathValueEquals, getTypedParameterValue, getSafeDescription, getSafePlaceholder, getValueAtPath, getSafeDefaultValue } from '@/lib/type-safe-utils'
+import { validateWorkflowId } from '@/lib/workflow-id-validation'
+
+/**
+ * Safely gets the workflowId from URL search params
+ * @returns A validated and URI-encoded workflowId
+ */
+function getSafeWorkflowIdFromUrl(): string {
+ if (typeof window === 'undefined') {
+ return encodeURIComponent('')
+ }
+
+ const urlParams = new URLSearchParams(window.location.search)
+ const workflowId = urlParams.get('workflowId')
+ const validatedWorkflowId = validateWorkflowId(workflowId)
+
+ return encodeURIComponent(validatedWorkflowId)
+}
export function NodeConfigPanel() {
const { nodes, selectedNodeId, isConfigPanelOpen, setConfigPanelOpen, setSelectedNodeId, updateNode, deleteNode, pendingDeleteNodeId, clearPendingDelete } = useWorkflowStore()
@@ -113,21 +133,59 @@ export function NodeConfigPanel() {
}
const handleConfigChange = (path: string, value: unknown) => {
- const setDeep = (obj: Record, p: string, v: unknown) => {
+ const setDeep = (obj: Record, p: string, v: unknown): Record => {
const parts = p.split('.')
- const clone: Record = { ...obj }
+
+ // Validate path segments to prevent prototype pollution
+ const dangerousSegments = ['__proto__', 'constructor', 'prototype']
+ for (const part of parts) {
+ if (dangerousSegments.includes(part.toLowerCase())) {
+ throw new Error(`Invalid path segment: "${part}" - potential prototype pollution attempt`)
+ }
+ }
+
+ // Type guard to check if value is a valid object
+ const isValidObject = (val: unknown): val is Record => {
+ return val !== null && typeof val === 'object' && !Array.isArray(val)
+ }
+
+ // Create a safe clone using Object.create(null) for the root to avoid prototype chain
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment -- Object.create(null) creates object without prototype
+ const clone: Record = Object.create(null)
+
+ // Safely copy properties from the original object
+ for (const [key, val] of Object.entries(obj)) {
+ if (Object.prototype.hasOwnProperty.call(obj, key)) {
+ clone[key] = val
+ }
+ }
+
let cur: Record = clone
for (let i = 0; i < parts.length - 1; i += 1) {
const key = parts[i]
const next = cur[key]
- if (typeof next !== 'object' || next === null) {
- cur[key] = {}
+ if (!isValidObject(next)) {
+ // Create safe object without prototype chain
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment -- Object.create(null) creates object without prototype
+ cur[key] = Object.create(null)
} else {
- cur[key] = { ...(next as Record) }
+ // Safely clone the nested object
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment -- Object.create(null) creates object without prototype
+ const nestedClone: Record = Object.create(null)
+ for (const [nestedKey, nestedVal] of Object.entries(next)) {
+ if (Object.prototype.hasOwnProperty.call(next, nestedKey)) {
+ nestedClone[nestedKey] = nestedVal
+ }
+ }
+ cur[key] = nestedClone
}
cur = cur[key] as Record
}
- cur[parts[parts.length - 1]] = v
+
+ // Set the final value only if the key is safe
+ const finalKey = parts[parts.length - 1]
+ cur[finalKey] = v
+
return clone
}
const nextConfig = setDeep((selectedNode.data.config as Record) || {}, path, value)
@@ -136,20 +194,67 @@ export function NodeConfigPanel() {
})
}
- const getValueAtPath = (obj: Record | undefined, path: string): unknown => {
- if (!obj) return undefined
- return path.split('.').reduce((acc: unknown, part: string) => {
- if (acc && typeof acc === 'object') {
- return (acc as Record)[part]
+ // Type-safe path utilities are now imported from lib/type-safe-utils
+
+ // Inline type-safe parameter helpers to avoid ESLint unsafe operations
+ const safeString = (value: unknown): string => typeof value === 'string' ? value : ''
+ const safeNumber = (value: unknown): number => typeof value === 'number' ? value : 0
+ const safeBoolean = (value: unknown): boolean => typeof value === 'boolean' ? value : false
+ const safeObject = (value: unknown): Record =>
+ (value && typeof value === 'object' && !Array.isArray(value)) ? value as Record : {}
+
+ const getParamValue = (path: string, paramType: 'string' | 'number' | 'boolean', defaultVal: unknown): string | number | boolean => {
+ const config = (selectedNode.data.config as Record) || {}
+ try {
+ if (paramType === 'string') {
+ return getTypedParameterValue(config, path, defaultVal, 'string')
+ } else if (paramType === 'number') {
+ return getTypedParameterValue(config, path, defaultVal, 'number')
+ } else {
+ return getTypedParameterValue(config, path, defaultVal, 'boolean')
+ }
+ } catch {
+ // Fallback for type safety
+ switch (paramType) {
+ case 'string':
+ return ''
+ case 'number':
+ return 0
+ case 'boolean':
+ return false
+ default:
+ return ''
}
- return undefined
- }, obj)
+ }
}
const renderConfig = () => {
const { data } = selectedNode
const def = findNodeDefinition(selectedNode)
if (def?.parameters && def.parameters.length > 0) {
+ // Define a proper interface for parameter definition
+ interface ExtendedParameterDefinition {
+ type: string
+ label: string
+ path: string
+ default?: unknown
+ description?: unknown
+ placeholder?: unknown
+ options?: Array<{ label: string; value: string }> | (() => Array<{ label: string; value: string }>)
+ showIf?: Array<{ path?: string; name?: string; equals: string | number | boolean }>
+ credentialType?: CredentialType
+ }
+
+ // Type guard function to check if parameter has required properties
+ const isValidParameter = (param: unknown): param is ExtendedParameterDefinition => {
+ if (!param || typeof param !== 'object') return false
+ const p = param as Record
+ return typeof p.type === 'string' &&
+ typeof p.label === 'string' &&
+ typeof p.path === 'string'
+ }
+
+ const parameters = def.parameters.filter(isValidParameter) as ExtendedParameterDefinition[]
const FieldLabel = ({ text, description, htmlFor }: { text: string; description?: string; htmlFor?: string }) => (
@@ -187,78 +292,139 @@ export function NodeConfigPanel() {
)}
- {def.parameters.map((param) => {
- const shouldShow = !param.showIf || param.showIf.length === 0
- ? true
- : param.showIf.some((cond) => getValueAtPath(data.config as Record, cond.path) === cond.equals)
+ {parameters.map((param) => {
+ // Type-safe showIf condition checking with runtime guards
+ const shouldShow = (() => {
+ // Check if showIf exists and is an array
+ if (!Array.isArray(param.showIf) || param.showIf.length === 0) {
+ return true
+ }
+
+ // Safely assert selectedNode.data.config exists
+ const config = selectedNode?.data?.config
+ if (!config || typeof config !== 'object') {
+ return true // Show by default if config is invalid
+ }
+
+ // Check if any condition matches with safe predicate
+ return param.showIf.some((cond) => {
+ // Verify cond is an object and has required properties
+ if (!cond || typeof cond !== 'object') {
+ return false
+ }
+
+ // Type guard for condition structure
+ const isValidCondition = (c: unknown): c is { path?: string; name?: string; equals: string | number | boolean } => {
+ if (!c || typeof c !== 'object') return false
+ const condition = c as Record
+
+ // Must have either path or name (but not both) as strings
+ const hasPath = typeof condition.path === 'string'
+ const hasName = typeof condition.name === 'string'
+ const hasEquals = condition.equals !== undefined
+
+ return (hasPath || hasName) && hasEquals && !(hasPath && hasName)
+ }
+
+ if (!isValidCondition(cond)) {
+ return false
+ }
+
+ // Extract the path or name safely
+ const pathToCheck = cond.path || cond.name || ''
+ if (!pathToCheck) {
+ return false
+ }
+
+ return pathValueEquals(config as Record, pathToCheck, cond.equals)
+ })
+ })()
+
if (!shouldShow) return null
- const value = getValueAtPath(data.config as Record, param.path)
switch (param.type) {
- case 'select':
+ case 'select': {
+ const config = (selectedNode.data.config as Record) || {}
+ const paramPath = param.path
+ const currentValue = getValueAtPath(config, paramPath)
+ const defaultVal = getSafeDefaultValue(param.default, 'string')
+ const value = typeof currentValue === 'string' ? currentValue : defaultVal
+ const description = getSafeDescription(param.description)
return (
-
-
+
+
)
+ }
case 'string':
+ case 'text': {
+ // Allow both 'string' and 'text' parameter types for compatibility
+ const paramPath = param.path
+ const value = getParamValue(paramPath, 'string', param.default)
+ const description = getSafeDescription(param.description)
return (
-
-
+
+
handleConfigChange(param.path, e.target.value)}
- placeholder={param.description}
+ value={String(value)}
+ onChange={(e) => handleConfigChange(paramPath, e.target.value)}
+ placeholder={description}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'textarea':
+ }
+ case 'textarea': {
+ const value = getParamValue(param.path, 'string', param.default)
+ const description = getSafeDescription(param.description)
return (
-
+
)
+ }
case 'json': {
- const path = param.path
+ const paramPath = param.path
// Friendly editors for headers/body
- const isHeaders = path === 'headers'
- const isBody = path === 'body'
+ const isHeaders = paramPath === 'headers'
+ const isBody = paramPath === 'body'
if (isHeaders || isBody) {
- const initialRows = Array.isArray(kvStateByPath[path])
- ? kvStateByPath[path]
- : Object.entries(((value as Record
) || {})).map(([k, v]) => ({ id: `${k}-${Math.random().toString(36).slice(2)}`, key: k, value: String(v) }))
+ const objectValue = getObjectValue>(selectedNode.data.config as Record, param.path, {})
+ const existingKvState = kvStateByPath[paramPath as keyof typeof kvStateByPath]
+ const initialRows = Array.isArray(existingKvState)
+ ? existingKvState
+ : Object.entries(objectValue).map(([k, v]) => ({ id: `${k}-${Math.random().toString(36).slice(2)}`, key: k, value: String(v) }))
let rows = initialRows
if (!rows || rows.length === 0) {
rows = [{ id: 'new', key: '', value: '' }]
}
const setRows = (next: { id: string; key: string; value: string }[]) => {
- setKvStateByPath((s) => ({ ...s, [path]: next }))
+ setKvStateByPath((s) => ({ ...s, [paramPath]: next }))
const obj: Record = {}
next.forEach((r) => {
const k = r.key.trim()
if (k) obj[k] = r.value
})
- handleConfigChange(path, obj)
+ handleConfigChange(paramPath, obj)
}
const addRow = () => setRows([...(rows || []), { id: Math.random().toString(36).slice(2), key: '', value: '' }])
const removeRow = (id: string) => setRows((rows || []).filter((r) => r.id !== id))
@@ -270,7 +436,7 @@ export function NodeConfigPanel() {
})
return (
-
+
{(rows || []).map((row) => (
@@ -317,7 +483,53 @@ export function NodeConfigPanel() {
)
}
+ const defaultValue = getSafeDefaultValue(param.default, 'object')
+ const jsonValue = getObjectValue(selectedNode.data.config as Record
, param.path, defaultValue)
+ const existingJsonText = jsonTextByPath[paramPath as keyof typeof jsonTextByPath]
const displayText =
- typeof jsonTextByPath[path] === 'string'
- ? jsonTextByPath[path]
- : JSON.stringify(value ?? param.default ?? {}, null, 2)
+ typeof existingJsonText === 'string'
+ ? existingJsonText
+ : JSON.stringify(jsonValue, null, 2)
return (
-
+
)
}
- case 'stringList':
+ case 'stringList': {
+ const arrayValue = getArrayValue(selectedNode.data.config as Record, param.path, [])
+ const description = getSafeDescription(param.description)
return (
-
+
handleConfigChange(param.path, e.target.value.split(',').map((s) => s.trim()).filter(Boolean))}
- placeholder={param.description || 'first@email.com, next@email.com'}
+ placeholder={description || 'first@email.com, next@email.com'}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'number':
+ }
+ case 'number': {
+ const numberValue = getParamValue(param.path, 'number', param.default)
+ const description = getSafeDescription(param.description)
return (
-
+
handleConfigChange(param.path, Number(e.target.value || 0))}
- placeholder={param.description}
+ placeholder={description}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'boolean':
+ }
+ case 'boolean': {
+ const booleanValue = getParamValue(param.path, 'boolean', param.default)
+ const description = getSafeDescription(param.description)
return (
handleConfigChange(param.path, e.target.checked)}
/>
-
+
)
- case 'email':
+ }
+ case 'email': {
+ const emailValue = getParamValue(param.path, 'string', param.default)
+ const description = getSafeDescription(param.description)
+ const placeholder = getSafePlaceholder(param.placeholder)
return (
-
+
handleConfigChange(param.path, e.target.value)}
- placeholder={param.placeholder || 'Enter email address'}
+ placeholder={placeholder || 'Enter email address'}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'password':
+ }
+ case 'password': {
+ const passwordValue = getParamValue(param.path, 'string', param.default)
+ const description = getSafeDescription(param.description)
+ const placeholder = getSafePlaceholder(param.placeholder)
return (
-
+
handleConfigChange(param.path, e.target.value)}
- placeholder={param.placeholder || 'Enter password'}
+ placeholder={placeholder || 'Enter password'}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'url':
+ }
+ case 'url': {
+ const urlValue = getParamValue(param.path, 'string', param.default)
+ const description = getSafeDescription(param.description)
+ const placeholder = getSafePlaceholder(param.placeholder)
return (
-
+
handleConfigChange(param.path, e.target.value)}
- placeholder={param.placeholder || 'Enter URL'}
+ placeholder={placeholder || 'Enter URL'}
className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
/>
)
- case 'text':
+ }
+
+ case 'credential': {
+ const credentialValue = getParamValue(param.path, 'string', param.default)
+ const description = getSafeDescription(param.description)
+ const placeholder = getSafePlaceholder(param.placeholder)
+ const credentialType = toCredentialType(param.credentialType)
return (
-
- handleConfigChange(param.path, e.target.value)}
- placeholder={param.placeholder || param.description}
- className="bg-white text-gray-900 placeholder:text-gray-400 border-gray-300"
+
+ handleConfigChange(param.path, credentialId)}
+ credentialType={credentialType}
+ placeholder={placeholder || 'Select a credential'}
+ className="w-full"
/>
)
+ }
default:
return null
}
@@ -763,7 +1005,7 @@ export function NodeConfigPanel() {
Webhook URL:
- {`${typeof window !== 'undefined' ? window.location.origin : ''}/api/webhooks/${typeof window !== 'undefined' ? (new URLSearchParams(window.location.search).get('workflowId') || '') : ''}`}
+ {`${typeof window !== 'undefined' ? window.location.origin : ''}/api/webhooks/${getSafeWorkflowIdFromUrl()}`}
Send {config.method || 'POST'} requests to this URL to trigger the workflow.
@@ -931,7 +1173,7 @@ export function NodeConfigPanel() {
{selectedNode.data.label}
-
+
{renderConfigContent()}
diff --git a/hooks/use-workflow-store.ts b/hooks/use-workflow-store.ts
index eddea69..64d11cb 100644
--- a/hooks/use-workflow-store.ts
+++ b/hooks/use-workflow-store.ts
@@ -5,7 +5,125 @@ import { applyNodeChanges, applyEdgeChanges, OnNodesChange, OnEdgesChange, Conne
import { v4 as uuidv4 } from 'uuid'
import { Workflow, WorkflowNode, WorkflowEdge, WorkflowExecution, ExecutionLog } from '@/types/workflow'
import { executeWorkflow as executeWorkflowAction, stopWorkflowExecution } from '@/lib/workflow-actions'
-import { encryptEmailConfig, decryptEmailConfig, clearSensitiveData } from '@/lib/security'
+import { encryptEmailConfig, decryptEmailConfig, decryptDatabaseConfig, clearSensitiveData } from '@/lib/security'
+import { ActionType } from '@/types/workflow'
+import { migrateWorkflowNode } from '@/lib/migration-utils'
+
+// Helper function to encrypt node configs based on their type
+function encryptNodeConfig(node: WorkflowNode): WorkflowNode {
+ if (node.data.config && typeof node.data.config === 'object') {
+ const config = node.data.config as Record
+ let encryptedConfig: Record = config
+
+ // Apply type-specific encryption
+ if (node.data.nodeType === 'action') {
+ const actionNode = node.data as { actionType: ActionType }
+ switch (actionNode.actionType) {
+ case ActionType.EMAIL:
+ try {
+ encryptedConfig = encryptEmailConfig(config)
+ } catch {
+ encryptedConfig = { ...config }
+ }
+ break
+ case ActionType.DATABASE:
+ // Database configs don't need encryption here since they use credentialId references
+ // Keep the config as-is (the actual connection string is encrypted in credential store)
+ encryptedConfig = { ...config }
+ break
+ default:
+ // For unknown/unsupported action types, preserve the original config
+ encryptedConfig = { ...config }
+ break
+ }
+ }
+
+ return {
+ ...node,
+ data: {
+ ...node.data,
+ config: encryptedConfig
+ }
+ }
+ }
+ return node
+}
+
+// Helper function to decrypt node configs based on their type and handle migration
+function decryptNodeConfig(node: WorkflowNode): WorkflowNode {
+ // First, apply any necessary migrations
+ const migratedNode = migrateWorkflowNode(node)
+
+ if (migratedNode.data.config && typeof migratedNode.data.config === 'object') {
+ const config = migratedNode.data.config as Record
+ let decryptedConfig: Record = { ...config }
+
+ // Apply type-specific decryption
+ if (migratedNode.data.nodeType === 'action') {
+ const actionNode = migratedNode.data as { actionType: ActionType }
+ switch (actionNode.actionType) {
+ case ActionType.EMAIL:
+ try {
+ decryptedConfig = decryptEmailConfig(config)
+ } catch (error) {
+ console.warn('Failed to decrypt email config, using fallback:', error)
+ decryptedConfig = { ...config }
+ }
+ break
+ case ActionType.DATABASE:
+ try {
+ decryptedConfig = decryptDatabaseConfig(config)
+ } catch (error) {
+ console.warn('Failed to decrypt database config, using fallback:', error)
+ decryptedConfig = { ...config }
+ }
+ break
+ case ActionType.HTTP:
+ try {
+ // HTTP configs may contain sensitive headers or auth data
+ // For now, keep the config as-is since there's no specific HTTP decryption
+ decryptedConfig = { ...config }
+ } catch (error) {
+ console.warn('Failed to process HTTP config, using fallback:', error)
+ decryptedConfig = { ...config }
+ }
+ break
+ case ActionType.TRANSFORM:
+ try {
+ // Transform configs typically don't contain sensitive data
+ decryptedConfig = { ...config }
+ } catch (error) {
+ console.warn('Failed to process transform config, using fallback:', error)
+ decryptedConfig = { ...config }
+ }
+ break
+ case ActionType.DELAY:
+ try {
+ // Delay configs typically don't contain sensitive data
+ decryptedConfig = { ...config }
+ } catch (error) {
+ console.warn('Failed to process delay config, using fallback:', error)
+ decryptedConfig = { ...config }
+ }
+ break
+ default:
+ // Fallback for unknown action types - always provide a safe config
+ console.warn('Unknown action type encountered, using safe config fallback:', actionNode.actionType)
+ decryptedConfig = { ...config }
+ break
+ }
+ }
+
+ return {
+ ...migratedNode,
+ data: {
+ ...migratedNode.data,
+ config: decryptedConfig
+ }
+ }
+ }
+ return migratedNode
+}
interface WorkflowStore {
// Current workflow
@@ -66,18 +184,7 @@ export const useWorkflowStore = create((set, get) => ({
if (!workflow) return
// Encrypt sensitive data in nodes before saving
- const encryptedNodes = nodes.map(node => {
- if (node.data.config && typeof node.data.config === 'object') {
- return {
- ...node,
- data: {
- ...node.data,
- config: encryptEmailConfig(node.data.config as Record)
- }
- }
- }
- return node
- })
+ const encryptedNodes = nodes.map(encryptNodeConfig)
const draft = {
...workflow,
@@ -111,18 +218,7 @@ export const useWorkflowStore = create((set, get) => ({
// Workflow management
setWorkflow: (workflow) => {
// Decrypt credentials when loading workflow
- const decryptedNodes = workflow.nodes.map(node => {
- if (node.data.config && typeof node.data.config === 'object') {
- return {
- ...node,
- data: {
- ...node.data,
- config: decryptEmailConfig(node.data.config as Record)
- }
- }
- }
- return node
- })
+ const decryptedNodes = workflow.nodes.map(decryptNodeConfig)
set({
workflow,
@@ -135,18 +231,7 @@ export const useWorkflowStore = create((set, get) => ({
const { nodes, edges } = get()
// Encrypt before storing
- const encryptedNodes = nodes.map(node => {
- if (node.data.config && typeof node.data.config === 'object') {
- return {
- ...node,
- data: {
- ...node.data,
- config: encryptEmailConfig(node.data.config as Record)
- }
- }
- }
- return node
- })
+ const encryptedNodes = nodes.map(encryptNodeConfig)
const draft = { ...workflow, nodes: encryptedNodes, edges, updatedAt: new Date() }
sessionStorage.setItem('workflowDraft', JSON.stringify(draft))
@@ -171,8 +256,8 @@ export const useWorkflowStore = create((set, get) => ({
edges: [],
})
try {
- localStorage.setItem('lastOpenedWorkflowId', newWorkflow.id)
- localStorage.setItem('workflowDraft', JSON.stringify(newWorkflow))
+ sessionStorage.setItem('lastOpenedWorkflowId', newWorkflow.id)
+ sessionStorage.setItem('workflowDraft', JSON.stringify(newWorkflow))
} catch (err) {
console.debug('create draft failed', err)
}
@@ -183,18 +268,7 @@ export const useWorkflowStore = create((set, get) => ({
if (!workflow) return
// Encrypt sensitive data before saving
- const encryptedNodes = nodes.map(node => {
- if (node.data.config && typeof node.data.config === 'object') {
- return {
- ...node,
- data: {
- ...node.data,
- config: encryptEmailConfig(node.data.config as Record)
- }
- }
- }
- return node
- })
+ const encryptedNodes = nodes.map(encryptNodeConfig)
const updatedWorkflow: Workflow = {
...workflow,
diff --git a/lib/credential-store.test.ts b/lib/credential-store.test.ts
new file mode 100644
index 0000000..11d650d
--- /dev/null
+++ b/lib/credential-store.test.ts
@@ -0,0 +1,128 @@
+import { describe, it, expect, beforeEach, vi } from 'vitest'
+import { credentialStore, migrateConnectionStringToCredential, resolveConnectionString } from './credential-store'
+
+// Mock sessionStorage
+const mockSessionStorage = {
+ getItem: vi.fn(),
+ setItem: vi.fn(),
+ removeItem: vi.fn(),
+ clear: vi.fn(),
+ length: 0,
+ key: vi.fn()
+}
+
+// Mock the security module
+vi.mock('./security', () => ({
+ encryptCredential: vi.fn((value: string) => `encrypted_${value}`),
+ decryptCredential: vi.fn((value: string) => value.replace('encrypted_', '')),
+ isEncrypted: vi.fn((value: string) => value.startsWith('encrypted_'))
+}))
+
+// Set up sessionStorage mock
+Object.defineProperty(globalThis, 'sessionStorage', {
+ value: mockSessionStorage,
+ writable: true
+})
+
+describe('credential-store', () => {
+ beforeEach(() => {
+ // Clear sessionStorage before each test
+ credentialStore.clearAllCredentials()
+ vi.clearAllMocks()
+ })
+
+ describe('migrateConnectionStringToCredential', () => {
+ it('should throw error for empty connection string', () => {
+ expect(() => migrateConnectionStringToCredential('')).toThrow('Connection string is required')
+ })
+
+ it('should throw error for whitespace-only connection string', () => {
+ expect(() => migrateConnectionStringToCredential(' ')).toThrow('Connection string is required')
+ })
+
+ it('should throw error for null connection string', () => {
+ expect(() => migrateConnectionStringToCredential(null as unknown as string)).toThrow('Connection string is required')
+ })
+
+ it('should return credential ID if connection string is already a valid credential ID', () => {
+ const existingCredentialId = 'cred_12345678-1234-1234-1234-123456789abc'
+ // Mock isValidCredentialId to return true for this ID
+ vi.spyOn(credentialStore, 'isValidCredentialId').mockReturnValue(true)
+
+ const result = migrateConnectionStringToCredential(existingCredentialId)
+ expect(result).toBe(existingCredentialId)
+ })
+
+ it('should create new credential for valid connection string', () => {
+ const connectionString = 'postgresql://user:pass@localhost:5432/db'
+ const mockCredentialId = 'cred_12345678-1234-1234-1234-123456789abc'
+
+ // Mock the credential store methods
+ vi.spyOn(credentialStore, 'isValidCredentialId').mockReturnValue(false)
+ vi.spyOn(credentialStore, 'storeCredential').mockReturnValue(mockCredentialId)
+
+ const result = migrateConnectionStringToCredential(connectionString, 'Test Connection')
+
+ expect(credentialStore.storeCredential).toHaveBeenCalledWith(
+ 'Test Connection',
+ connectionString,
+ 'database',
+ 'Migrated from plain connection string'
+ )
+ expect(result).toBe(mockCredentialId)
+ })
+ })
+
+ describe('resolveConnectionString', () => {
+ it('should throw error for empty credential ID or plain string', () => {
+ expect(() => resolveConnectionString('')).toThrow('Connection string is required')
+ })
+
+ it('should throw error for whitespace-only credential ID or plain string', () => {
+ expect(() => resolveConnectionString(' ')).toThrow('Connection string is required')
+ })
+
+ it('should throw error for null credential ID or plain string', () => {
+ expect(() => resolveConnectionString(null as unknown as string)).toThrow('Connection string is required')
+ })
+
+ it('should resolve credential value for valid credential ID', () => {
+ const credentialId = 'cred_12345678-1234-1234-1234-123456789abc'
+ const expectedValue = 'postgresql://user:pass@localhost:5432/db'
+
+ // Mock the credential store methods
+ vi.spyOn(credentialStore, 'isValidCredentialId').mockReturnValue(true)
+ vi.spyOn(credentialStore, 'getCredentialValue').mockReturnValue(expectedValue)
+
+ const result = resolveConnectionString(credentialId)
+
+ expect(credentialStore.isValidCredentialId).toHaveBeenCalledWith(credentialId)
+ expect(credentialStore.getCredentialValue).toHaveBeenCalledWith(credentialId)
+ expect(result).toBe(expectedValue)
+ })
+
+ it('should return plain string as-is for backward compatibility', () => {
+ const plainConnectionString = 'postgresql://user:pass@localhost:5432/db'
+
+ // Mock isValidCredentialId to return false for plain string
+ vi.spyOn(credentialStore, 'isValidCredentialId').mockReturnValue(false)
+
+ const result = resolveConnectionString(plainConnectionString)
+
+ expect(credentialStore.isValidCredentialId).toHaveBeenCalledWith(plainConnectionString)
+ expect(result).toBe(plainConnectionString)
+ })
+
+ it('should return null if credential value cannot be retrieved', () => {
+ const credentialId = 'cred_12345678-1234-1234-1234-123456789abc'
+
+ // Mock the credential store methods
+ vi.spyOn(credentialStore, 'isValidCredentialId').mockReturnValue(true)
+ vi.spyOn(credentialStore, 'getCredentialValue').mockReturnValue(null)
+
+ const result = resolveConnectionString(credentialId)
+
+ expect(result).toBeNull()
+ })
+ })
+})
diff --git a/lib/credential-store.ts b/lib/credential-store.ts
new file mode 100644
index 0000000..3b9afd2
--- /dev/null
+++ b/lib/credential-store.ts
@@ -0,0 +1,320 @@
+/**
+ * Secure credential store for managing database connection strings and other secrets
+ */
+
+import { encryptCredential, decryptCredential, isEncrypted } from './security'
+import { CredentialType } from '@/types/credentials'
+
+export interface StoredCredential {
+ id: string
+ name: string
+ type: CredentialType
+ description?: string
+ encryptedValue: string
+ createdAt: Date
+ updatedAt: Date
+ metadata?: Record
+}
+
+export interface CredentialReference {
+ credentialId: string
+ type: CredentialType
+}
+
+class CredentialStore {
+ private storageKey = 'credentialStore'
+
+ /**
+ * Get all stored credentials (metadata only, not the actual values)
+ */
+ getAllCredentials(): Omit[] {
+ try {
+ const stored = sessionStorage.getItem(this.storageKey)
+ if (!stored) return []
+
+ const credentials: StoredCredential[] = JSON.parse(stored) as StoredCredential[]
+ return credentials.map(({ encryptedValue, ...rest }) => ({
+ ...rest,
+ createdAt: new Date(rest.createdAt),
+ updatedAt: new Date(rest.updatedAt)
+ }))
+ } catch (error) {
+ console.error('Failed to retrieve credentials:', error)
+ return []
+ }
+ }
+
+ /**
+ * Get a specific credential by ID
+ */
+ getCredential(id: string): StoredCredential | null {
+ try {
+ const stored = sessionStorage.getItem(this.storageKey)
+ if (!stored) return null
+
+ const credentials: StoredCredential[] = JSON.parse(stored) as StoredCredential[]
+ const credential = credentials.find(c => c.id === id)
+
+ if (!credential) return null
+
+ return {
+ ...credential,
+ createdAt: new Date(credential.createdAt),
+ updatedAt: new Date(credential.updatedAt)
+ }
+ } catch (error) {
+ console.error('Failed to retrieve credential:', error)
+ return null
+ }
+ }
+
+ /**
+ * Get the decrypted value of a credential
+ */
+ getCredentialValue(id: string): string | null {
+ const credential = this.getCredential(id)
+ if (!credential) return null
+
+ try {
+ const decrypted = decryptCredential(credential.encryptedValue)
+
+ // Fail closed: if the decrypted value still looks like ciphertext,
+ // it means decryption failed silently
+ if (isEncrypted(decrypted)) {
+ console.error('Failed to decrypt credential - result still appears encrypted')
+ return null
+ }
+
+ return decrypted
+ } catch (error) {
+ console.error('Failed to decrypt credential:', error)
+ return null
+ }
+ }
+
+ /**
+ * Store a new credential
+ */
+ storeCredential(
+ name: string,
+ value: string,
+ type: StoredCredential['type'],
+ description?: string,
+ metadata?: Record
+ ): string {
+ try {
+ const encryptedValue = encryptCredential(value)
+
+ // Security check: detect encryption failure
+ // If encryption failed, encryptCredential returns the original plaintext
+ if (!encryptedValue || encryptedValue === value || !isEncrypted(encryptedValue)) {
+ console.error('Failed to encrypt credential - refusing to store plaintext')
+ throw new Error('Credential encryption failed - cannot store plaintext secrets')
+ }
+
+ const id = `cred_${crypto.randomUUID()}`
+ const now = new Date()
+
+ const newCredential: StoredCredential = {
+ id,
+ name,
+ type,
+ description,
+ encryptedValue,
+ createdAt: now,
+ updatedAt: now,
+ metadata
+ }
+
+ const existing = this.getAllCredentialsWithValues()
+ existing.push(newCredential)
+
+ sessionStorage.setItem(this.storageKey, JSON.stringify(existing))
+ return id
+ } catch (error) {
+ console.error('Failed to store credential:', error)
+ throw new Error('Failed to store credential')
+ }
+ }
+
+ /**
+ * Update an existing credential
+ */
+ updateCredential(
+ id: string,
+ updates: Partial>
+ ): boolean {
+ try {
+ const credentials = this.getAllCredentialsWithValues()
+ const index = credentials.findIndex(c => c.id === id)
+
+ if (index === -1) return false
+
+ credentials[index] = {
+ ...credentials[index],
+ ...updates,
+ updatedAt: new Date()
+ }
+
+ sessionStorage.setItem(this.storageKey, JSON.stringify(credentials))
+ return true
+ } catch (error) {
+ console.error('Failed to update credential:', error)
+ return false
+ }
+ }
+
+ /**
+ * Update credential value (re-encrypt)
+ */
+ updateCredentialValue(id: string, newValue: string): boolean {
+ try {
+ // Step 1: Encrypt the new value BEFORE any mutation
+ const encryptedValue = encryptCredential(newValue)
+
+ // Step 2: Validate encryption success BEFORE any mutation
+ // If encryption failed, encryptCredential returns the original plaintext
+ if (!encryptedValue || encryptedValue === newValue || !isEncrypted(encryptedValue)) {
+ console.error('Failed to encrypt credential value - refusing to store plaintext')
+ throw new Error('Credential encryption failed - cannot store plaintext secrets')
+ }
+
+ // Step 3: Only after successful encryption, retrieve and modify credentials
+ const credentials = this.getAllCredentialsWithValues()
+ const index = credentials.findIndex(c => c.id === id)
+
+ if (index === -1) return false
+
+ // Step 4: Prepare the complete new credential object atomically
+ const updatedCredential: StoredCredential = {
+ ...credentials[index],
+ encryptedValue,
+ updatedAt: new Date()
+ }
+
+ // Step 5: Atomically replace the credential in the array
+ credentials[index] = updatedCredential
+
+ // Step 6: Persist to storage
+ sessionStorage.setItem(this.storageKey, JSON.stringify(credentials))
+ return true
+ } catch (error) {
+ console.error('Failed to update credential value:', error)
+ return false
+ }
+ }
+
+ /**
+ * Delete a credential
+ */
+ deleteCredential(id: string): boolean {
+ try {
+ const credentials = this.getAllCredentialsWithValues()
+ const filtered = credentials.filter(c => c.id !== id)
+
+ if (filtered.length === credentials.length) return false // Not found
+
+ sessionStorage.setItem(this.storageKey, JSON.stringify(filtered))
+ return true
+ } catch (error) {
+ console.error('Failed to delete credential:', error)
+ return false
+ }
+ }
+
+ /**
+ * Validate credential ID format
+ */
+ isValidCredentialId(id: string): boolean {
+ return /^cred_[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(id)
+ }
+
+ /**
+ * Check if credential exists
+ */
+ credentialExists(id: string): boolean {
+ return this.getCredential(id) !== null
+ }
+
+ /**
+ * Get credentials by type
+ */
+ getCredentialsByType(type: StoredCredential['type']): Omit[] {
+ return this.getAllCredentials().filter(c => c.type === type)
+ }
+
+ /**
+ * Clear all credentials (for cleanup)
+ */
+ clearAllCredentials(): void {
+ sessionStorage.removeItem(this.storageKey)
+ }
+
+ /**
+ * Private helper to get all credentials with encrypted values
+ */
+ private getAllCredentialsWithValues(): StoredCredential[] {
+ try {
+ const stored = sessionStorage.getItem(this.storageKey)
+ if (!stored) return []
+
+ const credentials: StoredCredential[] = JSON.parse(stored) as StoredCredential[]
+ return credentials.map(c => ({
+ ...c,
+ createdAt: new Date(c.createdAt),
+ updatedAt: new Date(c.updatedAt)
+ }))
+ } catch (error) {
+ console.error('Failed to retrieve credentials:', error)
+ return []
+ }
+ }
+}
+
+// Export singleton instance
+export const credentialStore = new CredentialStore()
+
+/**
+ * Migration utility to convert plain connection strings to credential references
+ */
+export function migrateConnectionStringToCredential(
+ connectionString: string,
+ name?: string
+): string {
+ if (!connectionString || connectionString.trim().length === 0) {
+ throw new Error('Connection string is required')
+ }
+
+ // Check if it's already a credential reference
+ if (credentialStore.isValidCredentialId(connectionString)) {
+ return connectionString
+ }
+
+ // Create a new credential
+ const credentialName = name || `Database Connection ${new Date().toISOString()}`
+ const credentialId = credentialStore.storeCredential(
+ credentialName,
+ connectionString,
+ 'database',
+ 'Migrated from plain connection string'
+ )
+
+ return credentialId
+}
+
+/**
+ * Utility to get connection string from credential reference
+ */
+export function resolveConnectionString(credentialIdOrPlainString: string): string | null {
+ if (!credentialIdOrPlainString || credentialIdOrPlainString.trim().length === 0) {
+ throw new Error('Connection string is required')
+ }
+
+ // Check if it's a credential reference
+ if (credentialStore.isValidCredentialId(credentialIdOrPlainString)) {
+ return credentialStore.getCredentialValue(credentialIdOrPlainString)
+ }
+
+ // Return as-is for backward compatibility (will be migrated on next save)
+ return credentialIdOrPlainString
+}
diff --git a/lib/legacy-migration-helper.ts b/lib/legacy-migration-helper.ts
new file mode 100644
index 0000000..626d50d
--- /dev/null
+++ b/lib/legacy-migration-helper.ts
@@ -0,0 +1,262 @@
+/**
+ * Helper utilities for identifying and migrating legacy configurations
+ * This can be used by administrators or users to check for and migrate legacy data
+ */
+
+import { Workflow, WorkflowNode, ActionType } from '@/types/workflow'
+import { DatabaseNodeConfig } from '@/nodes/DatabaseNode/DatabaseNode.types'
+import { DelayNodeConfig } from '@/nodes/DelayNode/DelayNode.types'
+import { needsDatabaseMigration, needsDelayMigration, migrateWorkflowNode } from './migration-utils'
+
+export interface LegacyConfigReport {
+ workflowId: string
+ workflowName: string
+ nodeId: string
+ nodeLabel: string
+ configType: 'database' | 'delay' | 'email' | 'other'
+ issue: string
+ canAutoMigrate: boolean
+}
+
+/**
+ * Scan a workflow for legacy configurations that need migration
+ */
+export function scanWorkflowForLegacyConfigs(workflow: Workflow): LegacyConfigReport[] {
+ const issues: LegacyConfigReport[] = []
+
+ workflow.nodes.forEach(node => {
+ const report = scanNodeForLegacyConfigs(workflow.id, workflow.name, node)
+ if (report) {
+ issues.push(report)
+ }
+ })
+
+ return issues
+}
+
+/**
+ * Scan a single node for legacy configuration issues
+ */
+function scanNodeForLegacyConfigs(workflowId: string, workflowName: string, node: WorkflowNode): LegacyConfigReport | null {
+ if (node.data.nodeType !== 'action') {
+ return null
+ }
+
+ const actionNode = node.data as { actionType: ActionType; config: Record }
+
+ switch (actionNode.actionType) {
+ case ActionType.DATABASE: {
+ const config = actionNode.config as DatabaseNodeConfig & Record
+
+ if (needsDatabaseMigration(config)) {
+ return {
+ workflowId,
+ workflowName,
+ nodeId: node.id,
+ nodeLabel: node.data.label || 'Database Node',
+ configType: 'database',
+ issue: 'Uses legacy connectionString instead of secure credential reference',
+ canAutoMigrate: true
+ }
+ }
+
+ // Check for legacy connectionString that should be cleaned up
+ if (config.credentialId && config.connectionString) {
+ return {
+ workflowId,
+ workflowName,
+ nodeId: node.id,
+ nodeLabel: node.data.label || 'Database Node',
+ configType: 'database',
+ issue: 'Contains both credentialId and legacy connectionString fields',
+ canAutoMigrate: true
+ }
+ }
+ break
+ }
+
+ case ActionType.DELAY: {
+ const config = actionNode.config as DelayNodeConfig & Record
+
+ // Check for legacy delayMs config (inline check to avoid ESLint issues)
+ const hasValueAndUnit = typeof config.value === 'number' && config.unit
+ const hasLegacyDelayMs = typeof config.delayMs === 'number'
+
+ // Needs migration if it has delayMs but missing value/unit
+ if (hasLegacyDelayMs && !hasValueAndUnit) {
+ return {
+ workflowId,
+ workflowName,
+ nodeId: node.id,
+ nodeLabel: node.data.label || 'Delay Node',
+ configType: 'delay',
+ issue: 'Uses legacy delayMs instead of value+unit pattern',
+ canAutoMigrate: true
+ }
+ }
+
+ // Check for legacy delayMs that should be cleaned up
+ if (hasValueAndUnit && hasLegacyDelayMs) {
+ return {
+ workflowId,
+ workflowName,
+ nodeId: node.id,
+ nodeLabel: node.data.label || 'Delay Node',
+ configType: 'delay',
+ issue: 'Contains both value+unit and legacy delayMs fields',
+ canAutoMigrate: true
+ }
+ }
+ break
+ }
+
+ case ActionType.EMAIL: {
+ // Add email legacy config checks here if needed
+ break
+ }
+
+ // Add other action types as needed
+ }
+
+ return null
+}
+
+/**
+ * Automatically migrate a workflow's legacy configurations
+ */
+export function migrateWorkflowLegacyConfigs(workflow: Workflow): {
+ migratedWorkflow: Workflow
+ migrationsApplied: number
+ errors: string[]
+} {
+ const errors: string[] = []
+ let migrationsApplied = 0
+ let migratedNodes: WorkflowNode[] = workflow.nodes
+
+ try {
+ migratedNodes = workflow.nodes.map(node => {
+ const originalNode = node
+ const migratedNode = migrateWorkflowNode(node)
+
+ if (migratedNode !== originalNode) {
+ migrationsApplied++
+ console.log(`Migrated node ${node.id} (${node.data.label || 'Unnamed'})`)
+ }
+
+ return migratedNode
+ })
+
+ const migratedWorkflow: Workflow = {
+ ...workflow,
+ nodes: migratedNodes,
+ updatedAt: new Date()
+ }
+
+ return {
+ migratedWorkflow,
+ migrationsApplied,
+ errors
+ }
+ } catch (error) {
+ errors.push(`Migration failed: ${error instanceof Error ? error.message : 'Unknown error'}`)
+ return {
+ migratedWorkflow: {
+ ...workflow,
+ nodes: migratedNodes,
+ updatedAt: migrationsApplied > 0 ? new Date() : workflow.updatedAt
+ },
+ migrationsApplied,
+ errors
+ }
+ }
+}
+
+/**
+ * Generate a migration report for multiple workflows
+ */
+export function generateMigrationReport(workflows: Workflow[]): {
+ totalWorkflows: number
+ workflowsWithIssues: number
+ totalIssues: number
+ issuesByType: Record
+ canAutoMigrateAll: boolean
+ report: LegacyConfigReport[]
+} {
+ const allIssues: LegacyConfigReport[] = []
+ const issuesByType: Record = {}
+
+ workflows.forEach(workflow => {
+ const workflowIssues = scanWorkflowForLegacyConfigs(workflow)
+ allIssues.push(...workflowIssues)
+ })
+
+ // Count issues by type
+ allIssues.forEach(issue => {
+ issuesByType[issue.configType] = (issuesByType[issue.configType] || 0) + 1
+ })
+
+ const workflowsWithIssues = new Set(allIssues.map(i => i.workflowId)).size
+ const canAutoMigrateAll = allIssues.every(issue => issue.canAutoMigrate)
+
+ return {
+ totalWorkflows: workflows.length,
+ workflowsWithIssues,
+ totalIssues: allIssues.length,
+ issuesByType,
+ canAutoMigrateAll,
+ report: allIssues
+ }
+}
+
+/**
+ * Display migration report in a readable format
+ */
+export function formatMigrationReport(report: ReturnType): string {
+ const lines: string[] = []
+
+ lines.push('=== Legacy Configuration Migration Report ===')
+ lines.push(`Total workflows scanned: ${report.totalWorkflows}`)
+ lines.push(`Workflows with issues: ${report.workflowsWithIssues}`)
+ lines.push(`Total issues found: ${report.totalIssues}`)
+ lines.push('')
+
+ if (report.totalIssues === 0) {
+ lines.push('✅ No legacy configurations found. All workflows are up to date!')
+ return lines.join('\n')
+ }
+
+ lines.push('Issues by type:')
+ Object.entries(report.issuesByType).forEach(([type, count]) => {
+ lines.push(` - ${type}: ${count} issue(s)`)
+ })
+ lines.push('')
+
+ if (report.canAutoMigrateAll) {
+ lines.push('✅ All issues can be automatically migrated.')
+ } else {
+ lines.push('⚠️ Some issues require manual intervention.')
+ }
+ lines.push('')
+
+ lines.push('Detailed issues:')
+ report.report.forEach((issue, index) => {
+ lines.push(`${index + 1}. ${issue.workflowName} > ${issue.nodeLabel}`)
+ lines.push(` Issue: ${issue.issue}`)
+ lines.push(` Auto-migrate: ${issue.canAutoMigrate ? 'Yes' : 'No'}`)
+ lines.push('')
+ })
+
+ return lines.join('\n')
+}
+
+/**
+ * Get deprecation warnings for direct storage patterns
+ */
+export function getDeprecationWarnings(): string[] {
+ return [
+ 'DEPRECATED: Direct connectionString storage in database node configurations',
+ 'Use credentialId references to secure credential store instead',
+ 'Legacy connectionString values are automatically migrated on workflow load',
+ 'Update your workflows to use the new credential management system'
+ ]
+}
diff --git a/lib/migration-utils.ts b/lib/migration-utils.ts
new file mode 100644
index 0000000..90cadbc
--- /dev/null
+++ b/lib/migration-utils.ts
@@ -0,0 +1,200 @@
+/**
+ * Migration utilities for converting legacy configurations to secure credential references
+ */
+
+import { credentialStore, migrateConnectionStringToCredential } from './credential-store'
+import { WorkflowNode, ActionType } from '@/types/workflow'
+import { DatabaseNodeConfig } from '@/nodes/DatabaseNode/DatabaseNode.types'
+import { DelayNodeConfig } from '@/nodes/DelayNode/DelayNode.types'
+
+/**
+ * Migrate database node configuration from legacy connectionString to credential reference
+ */
+export function migrateDatabaseNodeConfig(config: DatabaseNodeConfig & Record): DatabaseNodeConfig & Record {
+ // If already using credentialId, no migration needed
+ if (config.credentialId && config.credentialId.trim().length > 0) {
+ // Clean up legacy connectionString if present
+ if (config.connectionString) {
+ const { connectionString, ...cleanConfig } = config
+ console.log('Cleaned up legacy connectionString field after migration')
+ return cleanConfig
+ }
+ return config
+ }
+
+ // If we have a legacy connectionString, migrate it
+ if (config.connectionString && config.connectionString.trim().length > 0) {
+ try {
+ const credentialName = `Database Connection (migrated ${new Date().toISOString().split('T')[0]})`
+ const credentialId = migrateConnectionStringToCredential(config.connectionString, credentialName)
+
+ // Return updated config with credentialId and without connectionString
+ const { connectionString, ...cleanConfig } = config
+ const migratedConfig = {
+ ...cleanConfig,
+ credentialId
+ }
+
+ console.log(`Migrated database connectionString to credential: ${credentialId}`)
+ return migratedConfig
+ } catch (error) {
+ console.error('Failed to migrate database connectionString:', error)
+ // Return original config if migration fails
+ return config
+ }
+ }
+
+ // No connectionString to migrate
+ return config
+}
+
+/**
+ * Migrate a workflow node if it has legacy configuration
+ */
+export function migrateWorkflowNode(node: WorkflowNode): WorkflowNode {
+ // Only process action nodes
+ if (node.data.nodeType !== 'action') {
+ return node
+ }
+
+ const actionNode = node.data as { actionType: ActionType; config: Record }
+ let migratedConfig = actionNode.config
+
+ // Handle database node migration
+ if (actionNode.actionType === ActionType.DATABASE) {
+ const originalConfig = actionNode.config as DatabaseNodeConfig & Record
+ migratedConfig = migrateDatabaseNodeConfig(originalConfig)
+ }
+
+ // Handle delay node migration
+ if (actionNode.actionType === ActionType.DELAY) {
+ const originalConfig = actionNode.config as DelayNodeConfig & Record
+ migratedConfig = migrateDelayNodeConfig(originalConfig)
+ }
+
+ // Return updated node if config changed
+ if (migratedConfig !== actionNode.config) {
+ return {
+ ...node,
+ data: {
+ ...node.data,
+ config: migratedConfig
+ }
+ }
+ }
+
+ return node
+}
+
+/**
+ * Check if a database node config needs migration
+ */
+export function needsDatabaseMigration(config: DatabaseNodeConfig & Record): boolean {
+ const hasCredentialId = Boolean(config.credentialId && config.credentialId.trim().length > 0)
+ const hasConnectionString = Boolean(config.connectionString && config.connectionString.trim().length > 0)
+
+ // Needs migration if it has connectionString but no credentialId
+ return !hasCredentialId && hasConnectionString
+}
+
+/**
+ * Validate a database node configuration (post-migration)
+ */
+export function validateDatabaseNodeConfig(config: DatabaseNodeConfig & Record): string[] {
+ const errors: string[] = []
+
+ const hasCredentialId = config.credentialId && config.credentialId.trim().length > 0
+ const hasConnectionString = config.connectionString && config.connectionString.trim().length > 0
+
+ // Must have either credentialId or connectionString
+ if (!hasCredentialId && !hasConnectionString) {
+ errors.push('Database credential is required')
+ return errors
+ }
+
+ // If using credentialId, validate it
+ if (hasCredentialId) {
+ if (!credentialStore.isValidCredentialId(config.credentialId)) {
+ errors.push('Invalid credential ID format')
+ } else if (!credentialStore.credentialExists(config.credentialId)) {
+ errors.push('Referenced credential does not exist')
+ }
+ }
+
+ // Warn about legacy connectionString
+ if (hasConnectionString && !hasCredentialId) {
+ console.warn('Database node is using legacy connectionString. Consider migrating to credential reference.')
+ }
+
+ return errors
+}
+
+/**
+ * Migrate DelayNode configuration from legacy delayMs to value+unit pattern
+ */
+export function migrateDelayNodeConfig(config: DelayNodeConfig & Record): DelayNodeConfig & Record {
+ // If already using the new pattern (has value and unit), no migration needed
+ if (typeof config.value === 'number' && config.unit) {
+ // Clean up legacy delayMs if present
+ if ('delayMs' in config) {
+ const { delayMs, ...cleanConfig } = config
+ console.log('Cleaned up legacy delayMs field after migration')
+ return cleanConfig
+ }
+ return config
+ }
+
+ // If we have a legacy delayMs, migrate it to value+unit
+ if (typeof config.delayMs === 'number' && config.delayMs > 0) {
+ try {
+ // Convert milliseconds to a reasonable unit
+ let value: number
+ let unit: 'milliseconds' | 'seconds' | 'minutes' | 'hours'
+
+ if (config.delayMs < 1000) {
+ // Less than 1 second - keep as milliseconds
+ value = config.delayMs
+ unit = 'milliseconds'
+ } else if (config.delayMs < 60000) {
+ // Less than 1 minute - convert to seconds
+ value = config.delayMs / 1000
+ unit = 'seconds'
+ } else if (config.delayMs < 3600000) {
+ // Less than 1 hour - convert to minutes
+ value = config.delayMs / 60000
+ unit = 'minutes'
+ } else {
+ // 1 hour or more - convert to hours
+ value = config.delayMs / 3600000
+ unit = 'hours'
+ }
+
+ // Remove delayMs and add value/unit
+ const { delayMs, ...cleanConfig } = config
+ const migratedConfig = {
+ ...cleanConfig,
+ value,
+ unit
+ }
+
+ console.log(`Migrated DelayNode from delayMs=${config.delayMs}ms to value=${value} ${unit}`)
+ return migratedConfig
+ } catch (error) {
+ console.warn('Failed to migrate DelayNode delayMs to value+unit:', error)
+ return config
+ }
+ }
+
+ return config
+}
+
+/**
+ * Check if a DelayNode config needs migration
+ */
+export function needsDelayMigration(config: DelayNodeConfig & Record): boolean {
+ const hasValueAndUnit = typeof config.value === 'number' && config.unit
+ const hasLegacyDelayMs = typeof config.delayMs === 'number'
+
+ // Needs migration if it has delayMs but missing value/unit
+ return hasLegacyDelayMs && !hasValueAndUnit
+}
diff --git a/lib/node-definitions.ts b/lib/node-definitions.ts
index 40412c6..9a929a4 100644
--- a/lib/node-definitions.ts
+++ b/lib/node-definitions.ts
@@ -4,15 +4,9 @@ import {
ActionType,
LogicType,
WorkflowNode,
- ScheduleNodeConfig,
} from '@/types/workflow'
-import { EMAIL_NODE_DEFINITION } from '@/nodes/EmailNode/EmailNode.schema'
-import { HTTP_NODE_DEFINITION } from '@/nodes/HttpNode/HttpNode.schema'
-import { SCHEDULE_NODE_DEFINITION } from '@/nodes/ScheduleNode/ScheduleNode.schema'
-import { WEBHOOK_NODE_DEFINITION } from '@/nodes/WebhookNode/WebhookNode.schema'
-import { MANUAL_NODE_DEFINITION } from '@/nodes/ManualNode/ManualNode.schema'
-import { IF_NODE_DEFINITION } from '@/nodes/IfNode/IfNode.schema'
-import { FILTER_NODE_DEFINITION } from '@/nodes/FilterNode/FilterNode.schema'
+import { getNodeDefinition, NodeDefinition as ImportedNodeDefinition } from '@/nodes'
+import { CredentialType } from '@/types/credentials'
// Minimal, n8n-inspired parameter schema for nodes.
// This powers defaults and validation and can later drive dynamic UIs.
@@ -29,6 +23,7 @@ type ParameterType =
| 'email'
| 'url'
| 'password'
+ | 'credential'
interface ParameterDefinition {
// Label shown to users
@@ -44,9 +39,12 @@ interface ParameterDefinition {
showIf?: Array<{ path: string; equals: string | number | boolean }>
// Default value for this parameter
default?: unknown
+ // For credential type parameters
+ credentialType?: CredentialType
}
-interface NodeDefinition {
+// Legacy NodeDefinition interface for backward compatibility
+interface LegacyNodeDefinition {
nodeType: NodeType
subType: TSubType
label: string
@@ -64,150 +62,65 @@ interface NodeDefinition {
validate?: (config: Record) => string[]
}
-function setValueAtPath(obj: Record, path: string, value: unknown) {
- if (!path) return
- const parts = path.split('.')
- let current: Record = obj
- for (let i = 0; i < parts.length - 1; i += 1) {
- const part = parts[i]
- const existing = current[part]
- const isObject = typeof existing === 'object' && existing !== null
- if (!isObject) {
- current[part] = {}
- }
- current = current[part] as Record
- }
- current[parts[parts.length - 1]] = value
-}
+// Use the imported NodeDefinition for new functions
+type NodeDefinition = ImportedNodeDefinition
-function buildDefaultsFromParameters(params?: ParameterDefinition[]): Record | undefined {
- if (!params || params.length === 0) return undefined
- const cfg: Record = {}
- for (const p of params) {
- if (typeof p.default !== 'undefined') {
- setValueAtPath(cfg, p.path, p.default)
- }
- }
- return cfg
-}
+// Utility functions removed - they are now handled by individual node modules
-// Legacy utility functions - these are now handled by individual node modules
-// Kept for backwards compatibility
+// Legacy compatibility functions - these delegate to the new modular system
+export function findNodeDefinition(node: WorkflowNode): ImportedNodeDefinition | undefined {
+ const data = node.data as WorkflowNode['data']
-// All node definitions are now handled by their respective modules
-// This file provides compatibility functions that delegate to the new modular system
+ // Use the new registry system for all nodes
+ switch (data.nodeType) {
+ case NodeType.ACTION: {
+ const actionType = (data as { actionType: ActionType }).actionType
+ return getNodeDefinition(NodeType.ACTION, actionType)
+ }
-const NODE_DEFINITIONS: NodeDefinition[] = [
- // All definitions moved to individual node modules
- // This array is kept for backwards compatibility but is no longer used
-]
+ case NodeType.TRIGGER: {
+ const triggerType = (data as { triggerType: TriggerType }).triggerType
+ return getNodeDefinition(NodeType.TRIGGER, triggerType)
+ }
+
+ case NodeType.LOGIC: {
+ const logicType = (data as { logicType: LogicType }).logicType
+ return getNodeDefinition(NodeType.LOGIC, logicType)
+ }
-export function findNodeDefinition(node: WorkflowNode): NodeDefinition | undefined {
- const data = node.data as WorkflowNode['data']
-
- // Special case for EmailNode - use the new definition
- if (data.nodeType === NodeType.ACTION && (data as { actionType: ActionType }).actionType === ActionType.EMAIL) {
- return EMAIL_NODE_DEFINITION as unknown as NodeDefinition
- }
-
- // Use legacy system for other nodes
- switch (data.nodeType) {
- case NodeType.ACTION:
- return NODE_DEFINITIONS.find((d) => d.nodeType === NodeType.ACTION && d.subType === (data as { actionType: ActionType }).actionType)
- case NodeType.TRIGGER:
- return NODE_DEFINITIONS.find((d) => d.nodeType === NodeType.TRIGGER && d.subType === (data as { triggerType: TriggerType }).triggerType)
- case NodeType.LOGIC:
- return NODE_DEFINITIONS.find((d) => d.nodeType === NodeType.LOGIC && d.subType === (data as { logicType: LogicType }).logicType)
default:
return undefined
}
}
-
export function getDefaultConfigForNode(nodeType: NodeType, subType: TriggerType | ActionType | LogicType): Record | undefined {
- // Route to appropriate node definition based on type and subtype
- if (nodeType === NodeType.TRIGGER) {
- switch (subType as TriggerType) {
- case TriggerType.SCHEDULE:
- return SCHEDULE_NODE_DEFINITION.getDefaults()
- case TriggerType.WEBHOOK:
- return WEBHOOK_NODE_DEFINITION.getDefaults()
- case TriggerType.MANUAL:
- return MANUAL_NODE_DEFINITION.getDefaults()
- default:
- return {}
- }
- }
-
- if (nodeType === NodeType.ACTION) {
- switch (subType as ActionType) {
- case ActionType.EMAIL:
- return EMAIL_NODE_DEFINITION.getDefaults()
- case ActionType.HTTP:
- return HTTP_NODE_DEFINITION.getDefaults()
- default:
- return {}
- }
- }
-
- if (nodeType === NodeType.LOGIC) {
- switch (subType as LogicType) {
- case LogicType.IF:
- return IF_NODE_DEFINITION.getDefaults()
- case LogicType.FILTER:
- return FILTER_NODE_DEFINITION.getDefaults()
- default:
- return {}
- }
- }
-
- return {}
+ // Use the new registry system for all nodes
+ const definition = getNodeDefinition(nodeType, subType as string)
+ return definition?.getDefaults() || {}
}
export function validateNodeBeforeExecute(node: WorkflowNode): string[] {
const data = node.data as WorkflowNode['data']
const config = (data as { config: Record }).config || {}
- // Route to appropriate node definition for validation
- if (data.nodeType === NodeType.TRIGGER) {
- const triggerData = data as { triggerType: TriggerType }
- switch (triggerData.triggerType) {
- case TriggerType.SCHEDULE:
- return SCHEDULE_NODE_DEFINITION.validate(config as unknown as Record)
- case TriggerType.WEBHOOK:
- return WEBHOOK_NODE_DEFINITION.validate(config as unknown as Record)
- case TriggerType.MANUAL:
- return MANUAL_NODE_DEFINITION.validate(config)
- default:
- return []
- }
- }
-
- if (data.nodeType === NodeType.ACTION) {
- const actionData = data as { actionType: ActionType }
- switch (actionData.actionType) {
- case ActionType.EMAIL:
- return EMAIL_NODE_DEFINITION.validate(config as unknown as Record)
- case ActionType.HTTP:
- return HTTP_NODE_DEFINITION.validate(config)
- default:
- return []
- }
- }
-
- if (data.nodeType === NodeType.LOGIC) {
- const logicData = data as { logicType: LogicType }
- switch (logicData.logicType) {
- case LogicType.IF:
- return IF_NODE_DEFINITION.validate(config)
- case LogicType.FILTER:
- return FILTER_NODE_DEFINITION.validate(config)
- default:
- return []
- }
+ // Use the new registry system for all nodes
+ let subType: string
+ switch (data.nodeType) {
+ case NodeType.TRIGGER:
+ subType = (data as { triggerType: TriggerType }).triggerType
+ break
+ case NodeType.ACTION:
+ subType = (data as { actionType: ActionType }).actionType
+ break
+ case NodeType.LOGIC:
+ subType = (data as { logicType: LogicType }).logicType
+ break
+ default:
+ return []
}
- return []
+ const definition = getNodeDefinition(data.nodeType, subType)
+ return definition?.validate(config) || []
}
diff --git a/lib/security.ts b/lib/security.ts
index 4b6e055..3e81d0f 100644
--- a/lib/security.ts
+++ b/lib/security.ts
@@ -84,6 +84,28 @@ export function decryptCredential(encryptedValue: string): string {
}
}
+/**
+ * Encrypt database node configuration
+ */
+export function encryptDatabaseConfig(config: Record): Record {
+ // For database nodes, we don't need to encrypt here since credentialId is just a reference
+ // The actual connection string is encrypted in the credential store
+ // However, trigger migration for legacy connectionString if present
+ if (config.connectionString && typeof config.connectionString === 'string' && !config.credentialId) {
+ console.warn('Found legacy connectionString in database config. Migration should be handled at the workflow level.');
+ }
+ return config;
+}
+
+/**
+ * Decrypt database node configuration
+ */
+export function decryptDatabaseConfig(config: Record): Record {
+ // For database nodes, we don't need to decrypt here since credentialId is just a reference
+ // The actual connection string is decrypted in the service when needed
+ return config;
+}
+
/**
* Encrypt email service configuration
*/
diff --git a/lib/type-safe-utils.ts b/lib/type-safe-utils.ts
new file mode 100644
index 0000000..ee33f99
--- /dev/null
+++ b/lib/type-safe-utils.ts
@@ -0,0 +1,256 @@
+/**
+ * Type-safe utility functions for accessing object values by path
+ * Replaces unsafe getValueAtPath usage throughout the codebase
+ */
+
+/**
+ * Type guard to check if a value is a valid object
+ */
+function isValidObject(value: unknown): value is Record {
+ return value !== null && typeof value === 'object' && !Array.isArray(value)
+}
+
+/**
+ * Generic type-safe path getter with fallback support
+ */
+export function getValueAtPath(
+ obj: Record | undefined,
+ path: string,
+ defaultValue?: T
+): T | undefined {
+ if (!obj || !isValidObject(obj)) {
+ return defaultValue
+ }
+
+ try {
+ const result = path.split('.').reduce((acc: unknown, part: string) => {
+ if (isValidObject(acc)) {
+ return acc[part]
+ }
+ return undefined
+ }, obj)
+
+ return result !== undefined ? (result as T) : defaultValue
+ } catch {
+ return defaultValue
+ }
+}
+
+/**
+ * Type-safe string value getter
+ */
+export function getStringValue(
+ obj: Record | undefined,
+ path: string,
+ defaultValue = ''
+): string {
+ const value = getValueAtPath(obj, path, defaultValue)
+ return typeof value === 'string' ? value : defaultValue
+}
+
+/**
+ * Type-safe boolean value getter
+ */
+export function getBooleanValue(
+ obj: Record | undefined,
+ path: string,
+ defaultValue = false
+): boolean {
+ const value = getValueAtPath(obj, path, defaultValue)
+ return typeof value === 'boolean' ? value : defaultValue
+}
+
+/**
+ * Type-safe number value getter
+ */
+export function getNumberValue(
+ obj: Record | undefined,
+ path: string,
+ defaultValue = 0
+): number {
+ const value = getValueAtPath(obj, path, defaultValue)
+ return typeof value === 'number' ? value : defaultValue
+}
+
+/**
+ * Type-safe array value getter
+ */
+export function getArrayValue(
+ obj: Record | undefined,
+ path: string,
+ defaultValue: T[] = []
+): T[] {
+ const value = getValueAtPath(obj, path, defaultValue)
+ return Array.isArray(value) ? value : defaultValue
+}
+
+/**
+ * Type-safe object value getter
+ */
+export function getObjectValue = Record>(
+ obj: Record | undefined,
+ path: string,
+ defaultValue: T = {} as T
+): T {
+ const value = getValueAtPath(obj, path, defaultValue)
+ return isValidObject(value) ? (value as T) : defaultValue
+}
+
+/**
+ * Type-safe string value getter with validation for non-empty strings
+ */
+export function getNonEmptyStringValue(
+ obj: Record | undefined,
+ path: string,
+ defaultValue = ''
+): string {
+ const value = getStringValue(obj, path, defaultValue)
+ return value.trim() || defaultValue
+}
+
+/**
+ * Type-safe value checker for conditional rendering
+ */
+export function hasValueAtPath(
+ obj: Record | undefined,
+ path: string
+): boolean {
+ const value = getValueAtPath(obj, path)
+ return value !== undefined && value !== null
+}
+
+/**
+ * Type-safe equality checker for path values
+ */
+export function pathValueEquals(
+ obj: Record | undefined,
+ path: string,
+ expectedValue: unknown
+): boolean {
+ const value = getValueAtPath(obj, path)
+ return value === expectedValue
+}
+
+/**
+ * Safe default value getter with type checking - always returns the expected type
+ */
+export function getSafeDefaultValue(
+ defaultValue: unknown,
+ type: 'string'
+): string
+export function getSafeDefaultValue(
+ defaultValue: unknown,
+ type: 'number'
+): number
+export function getSafeDefaultValue(
+ defaultValue: unknown,
+ type: 'boolean'
+): boolean
+export function getSafeDefaultValue>(
+ defaultValue: unknown,
+ type: 'object'
+): T
+export function getSafeDefaultValue(
+ defaultValue: unknown,
+ type: 'array'
+): T
+export function getSafeDefaultValue(
+ defaultValue: unknown,
+ type: 'string' | 'number' | 'boolean' | 'object' | 'array'
+): string | number | boolean | Record | unknown[] {
+ switch (type) {
+ case 'string':
+ return typeof defaultValue === 'string' ? defaultValue : ''
+ case 'number':
+ return typeof defaultValue === 'number' ? defaultValue : 0
+ case 'boolean':
+ return typeof defaultValue === 'boolean' ? defaultValue : false
+ case 'object':
+ return defaultValue !== null && typeof defaultValue === 'object' && !Array.isArray(defaultValue)
+ ? defaultValue as Record
+ : {}
+ case 'array':
+ return Array.isArray(defaultValue) ? defaultValue as unknown[] : []
+ default:
+ return ''
+ }
+}
+
+/**
+ * Safe parameter description getter
+ */
+export function getSafeDescription(description: unknown): string {
+ return typeof description === 'string' ? description : ''
+}
+
+/**
+ * Safe parameter placeholder getter
+ */
+export function getSafePlaceholder(placeholder: unknown): string {
+ return typeof placeholder === 'string' ? placeholder : ''
+}
+
+/**
+ * Comprehensive type-safe parameter value getter
+ */
+export function getTypedParameterValue(
+ config: Record | undefined,
+ path: string,
+ paramDefault: unknown,
+ type: 'string'
+): string
+export function getTypedParameterValue(
+ config: Record | undefined,
+ path: string,
+ paramDefault: unknown,
+ type: 'number'
+): number
+export function getTypedParameterValue(
+ config: Record | undefined,
+ path: string,
+ paramDefault: unknown,
+ type: 'boolean'
+): boolean
+export function getTypedParameterValue(
+ config: Record | undefined,
+ path: string,
+ paramDefault: unknown,
+ type: 'string' | 'number' | 'boolean'
+): string | number | boolean {
+ // Get value from config path
+ const configValue = getValueAtPath(config, path)
+
+ // If config has a value of the expected type, use it
+ if (type === 'string' && typeof configValue === 'string') {
+ return configValue
+ }
+ if (type === 'number' && typeof configValue === 'number') {
+ return configValue
+ }
+ if (type === 'boolean' && typeof configValue === 'boolean') {
+ return configValue
+ }
+
+ // Fall back to param default if it's the right type
+ if (type === 'string' && typeof paramDefault === 'string') {
+ return paramDefault
+ }
+ if (type === 'number' && typeof paramDefault === 'number') {
+ return paramDefault
+ }
+ if (type === 'boolean' && typeof paramDefault === 'boolean') {
+ return paramDefault
+ }
+
+ // Final fallback to type defaults
+ switch (type) {
+ case 'string':
+ return ''
+ case 'number':
+ return 0
+ case 'boolean':
+ return false
+ default:
+ return ''
+ }
+}
diff --git a/lib/workflow-id-validation.ts b/lib/workflow-id-validation.ts
new file mode 100644
index 0000000..6a41d03
--- /dev/null
+++ b/lib/workflow-id-validation.ts
@@ -0,0 +1,68 @@
+/**
+ * Shared workflow ID validation utilities
+ * This module provides consistent validation across the application
+ */
+
+/**
+ * Browser-compatible regex pattern for workflow ID validation
+ * - Must start and end with alphanumeric character
+ * - Can contain alphanumeric, dash, or underscore in the middle
+ * - No consecutive special characters (-- __ -_ _-)
+ * - Uses negative lookaheads instead of lookbehind for browser compatibility
+ */
+export const workflowIdPattern = /^(?!.*[_-]{2})(?![_-])(?!.*[_-]$)[a-zA-Z0-9_-]+$/
+
+/**
+ * Reserved names that are not allowed as workflow IDs
+ * These are case-insensitive and include common system paths and keywords
+ */
+export const reservedWorkflowNames = new Set([
+ 'api', 'app', 'www', 'admin', 'root', 'test', 'demo', 'config', 'settings',
+ 'system', 'public', 'private', 'static', 'assets', 'lib', 'src', 'node_modules',
+ 'null', 'undefined', 'true', 'false', 'new', 'delete', 'edit', 'create'
+])
+
+/**
+ * Validates and sanitizes a workflowId parameter
+ * @param workflowId - The workflowId to validate
+ * @returns A safe workflowId string or '' fallback
+ */
+export function validateWorkflowId(workflowId: string | null): string {
+ if (!workflowId) {
+ return ''
+ }
+
+ // Trim whitespace from input
+ const trimmed = workflowId.trim()
+
+ // Check if empty after trimming
+ if (!trimmed) {
+ return ''
+ }
+
+ // Check length constraints (min 3, max 64 characters)
+ if (trimmed.length < 3 || trimmed.length > 64) {
+ return ''
+ }
+
+ // Check for reserved names (case-insensitive)
+ if (reservedWorkflowNames.has(trimmed.toLowerCase())) {
+ return ''
+ }
+
+ if (!workflowIdPattern.test(trimmed)) {
+ return ''
+ }
+
+ return trimmed
+}
+
+/**
+ * Simple boolean validation function that returns true/false
+ * @param input - The string to validate
+ * @returns true if valid, false otherwise
+ */
+export function isValidWorkflowId(input: string): boolean {
+ const result = validateWorkflowId(input)
+ return result !== ''
+}
diff --git a/nodes/DatabaseNode/DatabaseNode.schema.ts b/nodes/DatabaseNode/DatabaseNode.schema.ts
new file mode 100644
index 0000000..a30c00e
--- /dev/null
+++ b/nodes/DatabaseNode/DatabaseNode.schema.ts
@@ -0,0 +1,209 @@
+import { NodeType, ActionType } from "@/types/workflow";
+import { DatabaseNodeConfig } from "./DatabaseNode.types";
+import { credentialStore, migrateConnectionStringToCredential } from "@/lib/credential-store";
+import { validateDatabaseNodeConfig, migrateDatabaseNodeConfig } from "@/lib/migration-utils";
+import { CredentialType } from "@/types/credentials";
+
+interface ParameterDefinition {
+ name: string;
+ label: string;
+ type:
+ | "text"
+ | "textarea"
+ | "select"
+ | "number"
+ | "boolean"
+ | "email"
+ | "url"
+ | "json"
+ | "password"
+ | "credential";
+ required?: boolean;
+ defaultValue?: unknown;
+ options?: Array<{ label: string; value: string }> | (() => Array<{ label: string; value: string }>);
+ placeholder?: string;
+ description?: string;
+ showIf?: Array<{ path: string; equals: string | number | boolean }>;
+ credentialType?: CredentialType;
+}
+
+/**
+ * Type guard to validate if an object is a valid DatabaseNodeConfig
+ */
+function isDatabaseNodeConfig(obj: Record): obj is DatabaseNodeConfig & Record {
+ // Check if obj is an object and not null
+ if (typeof obj !== 'object' || obj === null) {
+ return false;
+ }
+
+ // Check operation field
+ const validOperations = ["select", "insert", "update", "delete"] as const;
+ if (typeof obj.operation !== 'string' || !validOperations.includes(obj.operation as DatabaseNodeConfig['operation'])) {
+ return false;
+ }
+
+ // Check credentialId field (new) or connectionString field (legacy)
+ const hasCredentialId = typeof obj.credentialId === 'string' && obj.credentialId.trim().length > 0;
+ const hasConnectionString = typeof obj.connectionString === 'string' && obj.connectionString.trim().length > 0;
+
+ if (!hasCredentialId && !hasConnectionString) {
+ return false;
+ }
+
+ // Check query field
+ if (typeof obj.query !== 'string') {
+ return false;
+ }
+
+ // Check optional parameters field
+ if (obj.parameters !== undefined &&
+ (typeof obj.parameters !== 'string' && (typeof obj.parameters !== 'object' || obj.parameters === null || Array.isArray(obj.parameters)))) {
+ return false;
+ }
+
+ // Check optional schema field
+ if (obj.schema !== undefined && typeof obj.schema !== 'string') {
+ return false;
+ }
+
+ // Check optional table field
+ if (obj.table !== undefined && typeof obj.table !== 'string') {
+ return false;
+ }
+
+ return true;
+}
+
+// nodes/DatabaseNode/DatabaseNode.schema.ts
+
+interface NodeDefinition> {
+ nodeType: NodeType;
+ subType: ActionType;
+ label: string;
+ description: string;
+ parameters: ParameterDefinition[];
+ validate: (config: T) => string[];
+ getDefaults: () => DatabaseNodeConfig;
+}
+
+export const DATABASE_NODE_DEFINITION: NodeDefinition = {
+ nodeType: NodeType.ACTION,
+ subType: ActionType.DATABASE,
+ label: "Database Query",
+ description: "Execute database queries (placeholder implementation)",
+ parameters: [
+ {
+ name: "operation",
+ label: "Operation",
+ type: "select",
+ required: true,
+ defaultValue: "select",
+ options: [
+ { label: "Select", value: "select" },
+ { label: "Insert", value: "insert" },
+ { label: "Update", value: "update" },
+ { label: "Delete", value: "delete" },
+ ],
+ description: "Database operation to perform",
+ },
+ {
+ name: "credentialId",
+ label: "Database Credential",
+ type: "credential",
+ credentialType: "database",
+ required: true,
+ defaultValue: "",
+ placeholder: "Select or create a database credential",
+ description: "Secure database connection credential",
+ options: () => {
+ const credentials = credentialStore.getCredentialsByType('database') || [];
+ return credentials.map(cred => ({
+ label: `${cred.name} (${cred.description || 'No description'})`,
+ value: cred.id
+ }));
+ },
+ },
+ {
+ name: "query",
+ label: "SQL Query",
+ type: "textarea",
+ required: true,
+ defaultValue: "",
+ placeholder: "SELECT * FROM users WHERE id = $1",
+ description: "SQL query to execute",
+ },
+ {
+ name: "parameters",
+ label: "Parameters (JSON)",
+ type: "json",
+ required: false,
+ placeholder: '{"param1": "value1"}',
+ description: "Query parameters as JSON object",
+ },
+ ],
+ validate: (config: Record): string[] => {
+ const errors: string[] = [];
+
+ // First check if the config structure is valid using type guard
+ if (!isDatabaseNodeConfig(config)) {
+ errors.push("Invalid configuration structure");
+ return errors; // Return early if structure is invalid
+ }
+
+ // Now we can safely use config as DatabaseNodeConfig
+ const typed = config as DatabaseNodeConfig & Record;
+
+ // Validate operation (additional business logic validation)
+ if (!typed.operation || typed.operation.trim().length === 0) {
+ errors.push("Valid operation is required");
+ }
+
+ // Use migration utilities for credential validation
+ const credentialErrors = validateDatabaseNodeConfig(typed);
+ errors.push(...credentialErrors);
+
+ // Validate query (additional business logic validation)
+ if (!typed.query || typed.query.trim().length === 0) {
+ errors.push("SQL query is required");
+ }
+
+ // Additional validation for parameters content if provided
+ if (typed.parameters !== undefined && typed.parameters !== null) {
+ let parsedParameters: unknown;
+
+ // Check if parameters is a string and attempt JSON parsing
+ if (typeof typed.parameters === 'string') {
+ try {
+ parsedParameters = JSON.parse(typed.parameters);
+ } catch (e) {
+ errors.push("Invalid parameters JSON: Unable to parse JSON string");
+ return errors; // Return early if JSON parsing fails
+ }
+ } else {
+ parsedParameters = typed.parameters;
+ }
+
+ // Validate that the parsed/provided value is a proper object
+ if (parsedParameters === null) {
+ errors.push("Parameters cannot be null");
+ } else if (Array.isArray(parsedParameters)) {
+ errors.push("Parameters must be an object, not an array");
+ } else if (typeof parsedParameters !== 'object') {
+ errors.push("Parameters must be an object");
+ } else {
+ // If we parsed from string, replace the original value for subsequent validation
+ if (typeof typed.parameters === 'string') {
+ typed.parameters = parsedParameters as Record;
+ }
+ }
+ }
+
+ return errors;
+ },
+ getDefaults: (): DatabaseNodeConfig => ({
+ operation: "select",
+ credentialId: "",
+ query: "",
+ parameters: {},
+ }),
+};
diff --git a/nodes/DatabaseNode/DatabaseNode.service.ts b/nodes/DatabaseNode/DatabaseNode.service.ts
new file mode 100644
index 0000000..44f965e
--- /dev/null
+++ b/nodes/DatabaseNode/DatabaseNode.service.ts
@@ -0,0 +1,185 @@
+import { DatabaseNodeConfig, DatabaseExecutionResult } from './DatabaseNode.types'
+import { NodeExecutionContext, NodeExecutionResult } from '../types'
+import { resolveConnectionString, migrateConnectionStringToCredential } from '@/lib/credential-store'
+
+/**
+ * Creates an abortable delay that respects AbortSignal
+ */
+async function abortableDelay(ms: number, signal?: AbortSignal): Promise {
+ return new Promise((resolve, reject) => {
+ if (signal?.aborted) {
+ const error = new Error('The operation was aborted')
+ error.name = 'AbortError'
+ reject(error)
+ return
+ }
+
+ const timeoutId = setTimeout(() => {
+ resolve()
+ }, ms)
+
+ signal?.addEventListener('abort', () => {
+ clearTimeout(timeoutId)
+ const error = new Error('The operation was aborted')
+ error.name = 'AbortError'
+ reject(error)
+ })
+ })
+}
+
+export async function executeDatabaseNode(context: NodeExecutionContext): Promise {
+ const startTime = Date.now()
+
+ try {
+ const cfg = context?.config
+ if (!cfg) {
+ return {
+ success: false,
+ error: 'Node configuration is missing'
+ }
+ }
+ const config = cfg as DatabaseNodeConfig & Record
+
+ // Handle migration and credential resolution
+ let connectionString: string | null = null
+
+ // Check if we have credentialId (new approach)
+ if (config.credentialId && typeof config.credentialId === 'string') {
+ try {
+ connectionString = resolveConnectionString(config.credentialId)
+ if (!connectionString) {
+ return {
+ success: false,
+ error: 'Failed to resolve database credential'
+ }
+ }
+ } catch (error) {
+ return {
+ success: false,
+ error: error instanceof Error ? error.message : 'Failed to resolve database credential'
+ }
+ }
+ }
+ // Fallback to legacy connectionString and attempt migration
+ else if (config.connectionString && typeof config.connectionString === 'string' && config.connectionString.trim().length > 0) {
+ connectionString = config.connectionString
+
+ // TODO: In a real implementation, you would want to trigger migration here
+ // For now, we'll just log a warning
+ console.warn('Using legacy connectionString. Consider migrating to credential reference.');
+ }
+ else {
+ return {
+ success: false,
+ error: 'Database credential is required'
+ }
+ }
+ if (!config.query || typeof config.query !== 'string' || config.query.trim().length === 0) {
+ return {
+ success: false,
+ error: 'SQL query is required'
+ }
+ }
+
+ // Check for abort signal
+ if (context.signal?.aborted) {
+ return {
+ success: false,
+ error: 'Execution was cancelled'
+ }
+ }
+
+ // PLACEHOLDER IMPLEMENTATION
+ // In a real implementation, this would:
+ // 1. Use the resolved connectionString to establish database connection
+ // 2. Execute the SQL query with parameters
+ // 3. Return actual results
+ //
+ // Note: connectionString is now securely resolved from credential store
+ void connectionString; // Mark as intentionally used to avoid linter warnings
+
+ // Simulate database operation delay
+ await abortableDelay(100, context.signal)
+
+ const duration = Date.now() - startTime
+
+ // Mock response based on operation type
+ let mockResult: DatabaseExecutionResult
+
+ switch (config.operation) {
+ case 'select':
+ mockResult = {
+ operation: 'select',
+ rows: [
+ { id: 1, name: 'Mock User 1', email: 'user1@example.com' },
+ { id: 2, name: 'Mock User 2', email: 'user2@example.com' }
+ ],
+ duration,
+ query: config.query
+ }
+ break
+
+ case 'insert':
+ mockResult = {
+ operation: 'insert',
+ affectedRows: 1,
+ insertId: 123,
+ duration,
+ query: config.query
+ }
+ break
+
+ case 'update':
+ mockResult = {
+ operation: 'update',
+ affectedRows: 2,
+ duration,
+ query: config.query
+ }
+ break
+
+ case 'delete':
+ mockResult = {
+ operation: 'delete',
+ affectedRows: 1,
+ duration,
+ query: config.query
+ }
+ break
+
+ default:
+ return {
+ success: false,
+ error: `Unsupported operation: ${config.operation}`
+ }
+ }
+
+ return {
+ success: true,
+ output: mockResult
+ }
+
+ } catch (error) {
+ const duration = Date.now() - startTime
+
+ if (error instanceof Error) {
+ // Handle specific error types
+ if (error.name === 'AbortError') {
+ return {
+ success: false,
+ error: 'Database operation was cancelled'
+ }
+ }
+
+ return {
+ success: false,
+ error: error.message
+ }
+ }
+
+ return {
+ success: false,
+ error: 'Unknown error occurred during database operation'
+ }
+ }
+}
diff --git a/nodes/DatabaseNode/DatabaseNode.test.ts b/nodes/DatabaseNode/DatabaseNode.test.ts
new file mode 100644
index 0000000..4717208
--- /dev/null
+++ b/nodes/DatabaseNode/DatabaseNode.test.ts
@@ -0,0 +1,241 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest'
+import { executeDatabaseNode } from './DatabaseNode.service'
+import { DATABASE_NODE_DEFINITION } from './DatabaseNode.schema'
+import { DatabaseNodeConfig, DatabaseExecutionResult } from './DatabaseNode.types'
+import { NodeExecutionContext } from '../types'
+
+describe('DatabaseNode', () => {
+ describe('Schema and Validation', () => {
+ it('should have correct node definition structure', () => {
+ expect(DATABASE_NODE_DEFINITION.nodeType).toBe('action')
+ expect(DATABASE_NODE_DEFINITION.subType).toBe('database')
+ expect(DATABASE_NODE_DEFINITION.label).toBe('Database Query')
+ expect(DATABASE_NODE_DEFINITION.parameters).toHaveLength(4)
+ })
+
+ it('should validate required fields', () => {
+ const invalidConfigs = [
+ {}, // empty config
+ { operation: 'select' }, // missing connectionString and query
+ { connectionString: 'test', query: '' }, // empty query
+ { connectionString: '', query: 'SELECT *' }, // empty connectionString
+ ]
+
+ invalidConfigs.forEach(config => {
+ const errors = DATABASE_NODE_DEFINITION.validate(config)
+ expect(errors.length).toBeGreaterThan(0)
+ })
+ })
+
+ it('should validate operation types', () => {
+ const config = {
+ operation: 'invalid',
+ credentialId: 'test-credential-id',
+ query: 'SELECT *'
+ }
+
+ const errors = DATABASE_NODE_DEFINITION.validate(config)
+ expect(errors).toContain('Invalid configuration structure')
+ })
+
+ it('should validate parameters as object', () => {
+ const config = {
+ operation: 'select',
+ credentialId: 'test-credential-id',
+ query: 'SELECT *',
+ parameters: 'invalid parameters'
+ }
+
+ const errors = DATABASE_NODE_DEFINITION.validate(config)
+ expect(errors.length).toBeGreaterThan(0)
+ expect(errors).toContain('Invalid parameters JSON: Unable to parse JSON string')
+ })
+
+ it('should pass validation with valid config', () => {
+ const config = {
+ operation: 'select',
+ connectionString: 'postgresql://user:pass@localhost:5432/db',
+ query: 'SELECT * FROM users',
+ parameters: { limit: 10 }
+ }
+
+ const errors = DATABASE_NODE_DEFINITION.validate(config)
+ expect(errors).toHaveLength(0)
+ })
+
+ it('should provide correct defaults', () => {
+ const defaults = DATABASE_NODE_DEFINITION.getDefaults()
+ expect(defaults).toEqual({
+ operation: 'select',
+ credentialId: '',
+ query: '',
+ parameters: {}
+ })
+ })
+ })
+
+ describe('Database Execution', () => {
+ let mockContext: NodeExecutionContext
+
+ beforeEach(() => {
+ mockContext = {
+ nodeId: 'test-node',
+ workflowId: 'test-workflow',
+ executionId: 'test-execution',
+ config: {
+ operation: 'select',
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://user:pass@localhost:5432/testdb',
+ query: 'SELECT * FROM users'
+ } as DatabaseNodeConfig,
+ input: {},
+ previousNodes: []
+ }
+ })
+
+ it('should execute SELECT operation successfully', async () => {
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(true)
+ const output = result.output as DatabaseExecutionResult
+ expect(output).toMatchObject({
+ operation: 'select',
+ query: 'SELECT * FROM users'
+ })
+ expect(Array.isArray(output.rows)).toBe(true)
+ const rows = output.rows as Array<{ id: number; name: string }>
+ expect(rows.length).toBeGreaterThan(0)
+ if (rows.length > 0) {
+ expect(typeof rows[0].id).toBe('number')
+ expect(typeof rows[0].name).toBe('string')
+ }
+ expect(typeof output.duration).toBe('number')
+ })
+
+ it('should execute INSERT operation successfully', async () => {
+ mockContext.config = {
+ operation: 'insert',
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://user:pass@localhost:5432/testdb',
+ query: 'INSERT INTO users (name, email) VALUES ($1, $2)',
+ parameters: { name: 'John', email: 'john@example.com' }
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(true)
+ const output = result.output as DatabaseExecutionResult
+ expect(output).toMatchObject({
+ operation: 'insert',
+ affectedRows: 1
+ })
+ expect(typeof output.insertId).toBe('number')
+ expect(typeof output.duration).toBe('number')
+ })
+
+ it('should execute UPDATE operation successfully', async () => {
+ mockContext.config = {
+ operation: 'update',
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://user:pass@localhost:5432/testdb',
+ query: 'UPDATE users SET name = $1 WHERE id = $2'
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(true)
+ const output = result.output as DatabaseExecutionResult
+ expect(output).toMatchObject({
+ operation: 'update',
+ affectedRows: 2
+ })
+ expect(typeof output.duration).toBe('number')
+ })
+
+ it('should execute DELETE operation successfully', async () => {
+ mockContext.config = {
+ operation: 'delete',
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://user:pass@localhost:5432/testdb',
+ query: 'DELETE FROM users WHERE id = $1'
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(true)
+ const output = result.output as DatabaseExecutionResult
+ expect(output).toMatchObject({
+ operation: 'delete',
+ affectedRows: 1
+ })
+ expect(typeof output.duration).toBe('number')
+ })
+
+ it('should handle missing connection string', async () => {
+ mockContext.config = {
+ operation: 'select',
+ credentialId: '',
+ connectionString: '',
+ query: 'SELECT *'
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Database credential is required')
+ })
+
+ it('should handle whitespace-only credential ID', async () => {
+ mockContext.config = {
+ operation: 'select',
+ credentialId: ' ', // whitespace-only
+ connectionString: '',
+ query: 'SELECT *'
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Connection string is required')
+ })
+
+ it('should handle missing query', async () => {
+ mockContext.config = {
+ operation: 'select',
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://test',
+ query: ''
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('SQL query is required')
+ })
+
+ it('should handle abort signal', async () => {
+ const abortController = new AbortController()
+ mockContext.signal = abortController.signal
+ abortController.abort()
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Execution was cancelled')
+ })
+
+ it('should handle unsupported operation', async () => {
+ mockContext.config = {
+ operation: 'truncate' as DatabaseNodeConfig['operation'],
+ credentialId: 'test-credential-id',
+ connectionString: 'postgresql://test',
+ query: 'TRUNCATE TABLE users'
+ } as DatabaseNodeConfig
+
+ const result = await executeDatabaseNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Unsupported operation: truncate')
+ })
+ })
+})
diff --git a/nodes/DatabaseNode/DatabaseNode.tsx b/nodes/DatabaseNode/DatabaseNode.tsx
new file mode 100644
index 0000000..7937ca8
--- /dev/null
+++ b/nodes/DatabaseNode/DatabaseNode.tsx
@@ -0,0 +1,40 @@
+import React from 'react'
+import { Database } from 'lucide-react'
+import { ActionType } from '@/types/workflow'
+import { BaseNode } from '@/components/workflow/nodes/base-node'
+import { DatabaseNodeData } from './DatabaseNode.types'
+
+interface DatabaseNodeProps {
+ data: DatabaseNodeData
+ selected?: boolean
+}
+
+export function DatabaseNode({ data, selected }: DatabaseNodeProps) {
+ const displayConfig = {
+ operation: data.config?.operation || 'select',
+ table: data.config?.table || 'table',
+ connectionType: data.config?.connectionString?.includes('postgresql') ? 'PostgreSQL'
+ : data.config?.connectionString?.includes('mysql') ? 'MySQL'
+ : data.config?.connectionString?.includes('sqlite') ? 'SQLite'
+ : 'Database'
+ }
+
+ // Create enhanced data with description for BaseNode
+ const enhancedData = {
+ ...data,
+ description: `${displayConfig.operation.toUpperCase()} • ${displayConfig.connectionType} - Query: ${data.config?.query?.substring(0, 30) || 'Not configured'}${(data.config?.query?.length || 0) > 30 ? '...' : ''}`
+ }
+
+ return (
+ }
+ color="#a855f7"
+ />
+ )
+}
+
+// Export the node type for registration
+export const DATABASE_NODE_TYPE = ActionType.DATABASE
diff --git a/nodes/DatabaseNode/DatabaseNode.types.ts b/nodes/DatabaseNode/DatabaseNode.types.ts
new file mode 100644
index 0000000..7756042
--- /dev/null
+++ b/nodes/DatabaseNode/DatabaseNode.types.ts
@@ -0,0 +1,31 @@
+import { ActionNodeData, ActionType } from '@/types/workflow'
+
+export interface DatabaseNodeConfig {
+ operation: 'select' | 'insert' | 'update' | 'delete'
+ credentialId: string
+ query: string
+ parameters?: Record
+ schema?: string
+ table?: string
+ // @deprecated Legacy field for backward compatibility - will be migrated to credentialId
+ // Use credentialId with secure credential store instead
+ connectionString?: string
+ // Index signature to make it compatible with Record
+ [key: string]: unknown
+}
+
+export interface DatabaseNodeData extends ActionNodeData {
+ actionType: ActionType.DATABASE
+ config: DatabaseNodeConfig & Record
+}
+
+export interface DatabaseExecutionResult {
+ operation: string
+ rows?: unknown[]
+ affectedRows?: number
+ insertId?: string | number
+ duration: number
+ query: string
+}
+
+export type { DatabaseNodeConfig as DatabaseConfig }
diff --git a/nodes/DatabaseNode/index.ts b/nodes/DatabaseNode/index.ts
new file mode 100644
index 0000000..5e333d8
--- /dev/null
+++ b/nodes/DatabaseNode/index.ts
@@ -0,0 +1,4 @@
+export { DatabaseNode, DATABASE_NODE_TYPE } from './DatabaseNode'
+export { executeDatabaseNode } from './DatabaseNode.service'
+export { DATABASE_NODE_DEFINITION } from './DatabaseNode.schema'
+export type { DatabaseNodeConfig, DatabaseNodeData, DatabaseExecutionResult } from './DatabaseNode.types'
diff --git a/nodes/DelayNode/DelayNode.schema.ts b/nodes/DelayNode/DelayNode.schema.ts
new file mode 100644
index 0000000..f517847
--- /dev/null
+++ b/nodes/DelayNode/DelayNode.schema.ts
@@ -0,0 +1,152 @@
+import { NodeType, ActionType } from "@/types/workflow";
+import { DelayNodeConfig, getDelayMs } from "./DelayNode.types";
+
+interface ParameterDefinition {
+ name: string;
+ label: string;
+ type:
+ | "text"
+ | "textarea"
+ | "select"
+ | "number"
+ | "boolean"
+ | "email"
+ | "url"
+ | "json"
+ | "password";
+ required?: boolean;
+ defaultValue?: unknown;
+ options?: Array<{ label: string; value: string }>;
+ placeholder?: string;
+ description?: string;
+ showIf?: Array<{ path: string; equals: string | number | boolean }>;
+}
+
+interface NodeDefinition {
+ nodeType: NodeType;
+ subType: ActionType;
+ label: string;
+ description: string;
+ parameters: ParameterDefinition[];
+ validate: (config: Record) => string[];
+ getDefaults: () => DelayNodeConfig;
+}
+
+export const DELAY_NODE_DEFINITION: NodeDefinition = {
+ nodeType: NodeType.ACTION,
+ subType: ActionType.DELAY,
+ label: "Delay",
+ description: "Add a delay/wait period in workflow execution (placeholder implementation)",
+ parameters: [
+ {
+ name: "delayType",
+ label: "Delay Type",
+ type: "select",
+ required: true,
+ defaultValue: "fixed",
+ options: [
+ { label: "Fixed Duration", value: "fixed" },
+ { label: "Random Duration", value: "random" },
+ { label: "Exponential Backoff", value: "exponential" },
+ ],
+ description: "Type of delay to apply",
+ },
+ {
+ name: "value",
+ label: "Delay Value",
+ type: "number",
+ required: true,
+ defaultValue: 1,
+ placeholder: "5",
+ description: "Duration value for the delay",
+ },
+ {
+ name: "unit",
+ label: "Time Unit",
+ type: "select",
+ required: true,
+ defaultValue: "seconds",
+ options: [
+ { label: "Milliseconds", value: "milliseconds" },
+ { label: "Seconds", value: "seconds" },
+ { label: "Minutes", value: "minutes" },
+ { label: "Hours", value: "hours" },
+ ],
+ description: "Time unit for the delay value",
+ },
+ {
+ name: "maxDelayMs",
+ label: "Max Delay (ms)",
+ type: "number",
+ required: false,
+ placeholder: "60000",
+ description: "Maximum delay in milliseconds (for random/exponential)",
+ showIf: [
+ { path: "delayType", equals: "random" },
+ { path: "delayType", equals: "exponential" },
+ ],
+ },
+ {
+ name: "passthrough",
+ label: "Pass Through Data",
+ type: "boolean",
+ required: false,
+ defaultValue: true,
+ description: "Whether to pass input data through to the output",
+ },
+ ],
+ validate: (config: Record): string[] => {
+ const errors: string[] = [];
+ const typed = config as unknown as DelayNodeConfig;
+
+ // Validate delay type
+ const validDelayTypes = ["fixed", "random", "exponential"];
+ if (!typed.delayType || !validDelayTypes.includes(typed.delayType)) {
+ errors.push("Valid delay type is required");
+ }
+
+ // Validate delay value
+ if (typeof typed.value !== "number" || typed.value < 0) {
+ errors.push("Delay value must be a non-negative number");
+ }
+
+ // Validate unit
+ const validUnits = ["milliseconds", "seconds", "minutes", "hours"];
+ if (!typed.unit || !validUnits.includes(typed.unit)) {
+ errors.push("Valid time unit is required");
+ }
+
+ // Calculate delay in milliseconds for validation
+ if (typeof typed.value === "number" && typed.unit) {
+ try {
+ const delayMs = getDelayMs({ value: typed.value, unit: typed.unit });
+
+ // Validate reasonable delay limits
+ if (delayMs > 24 * 60 * 60 * 1000) { // 24 hours
+ errors.push("Delay cannot exceed 24 hours");
+ }
+ } catch (error) {
+ if (error instanceof Error) {
+ errors.push(error.message);
+ }
+ }
+ }
+
+ // Validate max delay for random/exponential types
+ if (typed.delayType === "random" || typed.delayType === "exponential") {
+ if (typed.maxDelayMs !== undefined && typed.maxDelayMs !== null) {
+ if (typeof typed.maxDelayMs !== "number" || typed.maxDelayMs <= 0) {
+ errors.push("Max delay must be a positive number");
+ }
+ }
+ }
+
+ return errors;
+ },
+ getDefaults: (): DelayNodeConfig => ({
+ delayType: "fixed",
+ unit: "seconds",
+ value: 1,
+ passthrough: true,
+ }),
+};
diff --git a/nodes/DelayNode/DelayNode.service.ts b/nodes/DelayNode/DelayNode.service.ts
new file mode 100644
index 0000000..9c1cc30
--- /dev/null
+++ b/nodes/DelayNode/DelayNode.service.ts
@@ -0,0 +1,146 @@
+import { DelayNodeConfig, DelayExecutionResult, getDelayMs } from './DelayNode.types'
+import { NodeExecutionContext, NodeExecutionResult } from '../types'
+
+export async function executeDelayNode(context: NodeExecutionContext): Promise {
+ const startTime = new Date()
+
+ try {
+ const config = context.config as unknown as DelayNodeConfig
+
+ // Validate required configuration
+ if (typeof config.value !== 'number' || config.value <= 0) {
+ return {
+ success: false,
+ error: 'Valid delay value is required'
+ }
+ }
+
+ // Check for abort signal before starting delay
+ if (context.signal?.aborted) {
+ return {
+ success: false,
+ error: 'Execution was cancelled'
+ }
+ }
+
+ // Convert delay value to milliseconds using helper function
+ let baseDelayMs: number
+ try {
+ baseDelayMs = getDelayMs({ value: config.value, unit: config.unit })
+ } catch (error) {
+ return {
+ success: false,
+ error: error instanceof Error ? error.message : 'Invalid delay configuration'
+ }
+ }
+
+ let actualDelayMs: number
+ // Calculate actual delay based on delay type
+ switch (config.delayType) {
+ case 'fixed':
+ actualDelayMs = baseDelayMs
+ break
+
+ case 'random':
+ const maxDelay = Math.max(0, config.maxDelayMs ?? (baseDelayMs * 2))
+ if (maxDelay <= baseDelayMs) {
+ actualDelayMs = baseDelayMs
+ } else {
+ actualDelayMs = baseDelayMs + Math.random() * (maxDelay - baseDelayMs)
+ }
+ break
+
+ case 'exponential':
+ // Simple exponential backoff simulation
+ const maxExp = config.maxDelayMs || baseDelayMs * 4
+ const exponentialDelay = baseDelayMs * Math.pow(2, Math.random() * 3)
+ actualDelayMs = Math.min(exponentialDelay, maxExp)
+ break
+
+ default:
+ return {
+ success: false,
+ error: `Unsupported delay type: ${config.delayType}`
+ }
+ }
+
+ // Ensure delay is within reasonable bounds
+ actualDelayMs = Math.max(1, Math.min(actualDelayMs, 24 * 60 * 60 * 1000)) // 1ms to 24 hours
+
+ // PLACEHOLDER IMPLEMENTATION
+ // In a real implementation, this would:
+ // 1. Handle very long delays efficiently (not just setTimeout)
+ // 2. Support delay persistence across restarts
+ // 3. Implement proper cancellation mechanisms
+ // 4. Handle system sleep/hibernate scenarios
+
+ // Execute the delay with cancellation support
+ await new Promise((resolve, reject) => {
+ const timer = setTimeout(() => {
+ resolve()
+ }, actualDelayMs)
+
+ // Handle abort signal
+ if (context.signal) {
+ const abortHandler = () => {
+ clearTimeout(timer)
+ reject(new Error('Delay was cancelled'))
+ }
+
+ if (context.signal.aborted) {
+ clearTimeout(timer)
+ reject(new Error('Delay was cancelled'))
+ return
+ }
+
+ context.signal.addEventListener('abort', abortHandler, { once: true })
+
+ // Clean up event listener when promise resolves
+ timer && setTimeout(() => {
+ context.signal?.removeEventListener('abort', abortHandler)
+ }, actualDelayMs + 100)
+ }
+ })
+
+ const endTime = new Date()
+
+ const result: DelayExecutionResult = {
+ delayType: config.delayType,
+ actualDelayMs: Math.round(actualDelayMs),
+ plannedDelayMs: Math.round(baseDelayMs),
+ unit: config.unit,
+ startTime: startTime.toISOString(),
+ endTime: endTime.toISOString(),
+ passthrough: config.passthrough ?? true,
+ passthroughData: config.passthrough !== false ? context.input : undefined
+ }
+
+ return {
+ success: true,
+ output: result
+ }
+
+ } catch (error) {
+ const endTime = new Date()
+
+ if (error instanceof Error) {
+ // Handle specific error types
+ if (error.name === 'AbortError' || error.message.includes('cancelled')) {
+ return {
+ success: false,
+ error: 'Delay was cancelled'
+ }
+ }
+
+ return {
+ success: false,
+ error: error.message
+ }
+ }
+
+ return {
+ success: false,
+ error: 'Unknown error occurred during delay'
+ }
+ }
+}
diff --git a/nodes/DelayNode/DelayNode.test.ts b/nodes/DelayNode/DelayNode.test.ts
new file mode 100644
index 0000000..e1d34db
--- /dev/null
+++ b/nodes/DelayNode/DelayNode.test.ts
@@ -0,0 +1,309 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest'
+import { executeDelayNode } from './DelayNode.service'
+import { DELAY_NODE_DEFINITION } from './DelayNode.schema'
+import { DelayNodeConfig, DelayExecutionResult } from './DelayNode.types'
+import { NodeExecutionContext } from '../types'
+
+describe('DelayNode', () => {
+ describe('Schema and Validation', () => {
+ it('should have correct node definition structure', () => {
+ expect(DELAY_NODE_DEFINITION.nodeType).toBe('action')
+ expect(DELAY_NODE_DEFINITION.subType).toBe('delay')
+ expect(DELAY_NODE_DEFINITION.label).toBe('Delay')
+ expect(DELAY_NODE_DEFINITION.parameters).toHaveLength(5)
+ })
+
+ it('should validate required fields', () => {
+ const invalidConfigs = [
+ {}, // empty config
+ { delayType: 'fixed' }, // missing value and unit
+ { value: 5 }, // missing delayType and unit
+ { unit: 'seconds' }, // missing delayType and value
+ { delayType: 'fixed', value: -1, unit: 'seconds' }, // negative value
+ ]
+
+ invalidConfigs.forEach(config => {
+ const errors = DELAY_NODE_DEFINITION.validate(config)
+ expect(errors.length).toBeGreaterThan(0)
+ })
+ })
+
+ it('should validate delay types', () => {
+ const config = {
+ delayType: 'invalid',
+ value: 5,
+ unit: 'seconds'
+ }
+
+ const errors = DELAY_NODE_DEFINITION.validate(config)
+ expect(errors).toContain('Valid delay type is required')
+ })
+
+ it('should validate time units', () => {
+ const config = {
+ delayType: 'fixed',
+ value: 5,
+ unit: 'invalid'
+ }
+
+ const errors = DELAY_NODE_DEFINITION.validate(config)
+ expect(errors).toContain('Valid time unit is required')
+ })
+
+ it('should validate delay value bounds', () => {
+ const configNegative = {
+ delayType: 'fixed',
+ value: -1,
+ unit: 'seconds'
+ }
+
+ const errorsNegative = DELAY_NODE_DEFINITION.validate(configNegative)
+ expect(errorsNegative).toContain('Delay value must be a non-negative number')
+
+ const configTooLong = {
+ delayType: 'fixed',
+ value: 25,
+ unit: 'hours'
+ }
+
+ const errorsTooLong = DELAY_NODE_DEFINITION.validate(configTooLong)
+ expect(errorsTooLong).toContain('Delay cannot exceed 24 hours')
+ })
+
+ it('should pass validation with valid config', () => {
+ const config = {
+ delayType: 'fixed',
+ value: 5,
+ unit: 'seconds',
+ passthrough: true
+ }
+
+ const errors = DELAY_NODE_DEFINITION.validate(config)
+ expect(errors).toHaveLength(0)
+ })
+
+ it('should allow zero delay values', () => {
+ const config = {
+ delayType: 'fixed',
+ value: 0,
+ unit: 'seconds',
+ passthrough: true
+ }
+
+ const errors = DELAY_NODE_DEFINITION.validate(config)
+ expect(errors).toHaveLength(0)
+ })
+
+ it('should provide correct defaults', () => {
+ const defaults = DELAY_NODE_DEFINITION.getDefaults()
+ expect(defaults).toEqual({
+ delayType: 'fixed',
+ unit: 'seconds',
+ value: 1,
+ passthrough: true
+ })
+ })
+ })
+
+ describe('Delay Execution', () => {
+ let mockContext: NodeExecutionContext
+
+ beforeEach(() => {
+ mockContext = {
+ nodeId: 'test-node',
+ workflowId: 'test-workflow',
+ executionId: 'test-execution',
+ config: {
+ delayType: 'fixed',
+ value: 0.1, // 100ms for fast tests
+ unit: 'seconds',
+ passthrough: true
+ } as DelayNodeConfig,
+ input: { testData: 'input data' },
+ previousNodes: []
+ }
+ })
+
+ it('should execute FIXED delay successfully', async () => {
+ const startTime = Date.now()
+ const result = await executeDelayNode(mockContext)
+ const endTime = Date.now()
+ const actualDelay = endTime - startTime
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ delayType: 'fixed',
+ actualDelayMs: 100,
+ plannedDelayMs: 100,
+ unit: 'seconds',
+ passthrough: true,
+ passthroughData: { testData: 'input data' }
+ })
+
+ // Check that actual delay was approximately correct (allow 50ms tolerance)
+ expect(actualDelay).toBeGreaterThanOrEqual(90)
+ expect(actualDelay).toBeLessThan(200)
+ })
+
+ it('should execute RANDOM delay successfully', async () => {
+ mockContext.config = {
+ delayType: 'random',
+ value: 0.1,
+ unit: 'seconds',
+ maxDelayMs: 200,
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ delayType: 'random',
+ plannedDelayMs: 100,
+ unit: 'seconds',
+ passthrough: true
+ })
+
+ // Random delay should be between 0 and maxDelayMs
+ const actualDelay = (result.output as DelayExecutionResult).actualDelayMs
+ expect(actualDelay).toBeGreaterThan(0)
+ expect(actualDelay).toBeLessThanOrEqual(200)
+ })
+
+ it('should execute EXPONENTIAL delay successfully', async () => {
+ mockContext.config = {
+ delayType: 'exponential',
+ value: 0.05,
+ unit: 'seconds',
+ maxDelayMs: 300,
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ delayType: 'exponential',
+ plannedDelayMs: 50,
+ unit: 'seconds',
+ passthrough: true
+ })
+
+ // Exponential delay should be between base and max
+ const actualDelay = (result.output as DelayExecutionResult).actualDelayMs
+ expect(actualDelay).toBeGreaterThan(0)
+ expect(actualDelay).toBeLessThanOrEqual(300)
+ })
+
+ it('should handle different time units', async () => {
+ // Test milliseconds
+ mockContext.config = {
+ delayType: 'fixed',
+ value: 50,
+ unit: 'milliseconds',
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ actualDelayMs: 50,
+ plannedDelayMs: 50,
+ unit: 'milliseconds'
+ })
+ })
+
+ it('should handle passthrough disabled', async () => {
+ mockContext.config = {
+ delayType: 'fixed',
+ value: 0.01,
+ unit: 'seconds',
+ passthrough: false
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ passthrough: false,
+ passthroughData: undefined
+ })
+ })
+
+ it('should handle invalid delay value', async () => {
+ mockContext.config = {
+ delayType: 'fixed',
+ value: -1,
+ unit: 'seconds',
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Valid delay value is required')
+ })
+
+ it('should handle abort signal', async () => {
+ const abortController = new AbortController()
+ mockContext.signal = abortController.signal
+ mockContext.config = {
+ delayType: 'fixed',
+ value: 1, // 1 second delay
+ unit: 'seconds',
+ passthrough: true
+ } as DelayNodeConfig
+
+ // Abort after 50ms
+ setTimeout(() => abortController.abort(), 50)
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Delay was cancelled')
+ })
+
+ it('should handle pre-aborted signal', async () => {
+ const abortController = new AbortController()
+ abortController.abort()
+ mockContext.signal = abortController.signal
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Execution was cancelled')
+ })
+
+ it('should handle unsupported delay type', async () => {
+ mockContext.config = {
+ delayType: 'invalid' as DelayNodeConfig['delayType'],
+ value: 1,
+ unit: 'seconds',
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Unsupported delay type: invalid')
+ })
+
+ it('should enforce delay bounds', async () => {
+ // Test very large delay gets capped
+ mockContext.config = {
+ delayType: 'fixed',
+ value: 0.01, // Use a very small value for testing the cap logic
+ unit: 'seconds',
+ passthrough: true
+ } as DelayNodeConfig
+
+ const result = await executeDelayNode(mockContext)
+
+ // Should execute successfully and respect bounds
+ expect(result.success).toBe(true)
+ expect((result.output as DelayExecutionResult).actualDelayMs).toBeGreaterThan(0)
+ expect((result.output as DelayExecutionResult).actualDelayMs).toBeLessThanOrEqual(24 * 60 * 60 * 1000)
+ })
+ })
+})
diff --git a/nodes/DelayNode/DelayNode.tsx b/nodes/DelayNode/DelayNode.tsx
new file mode 100644
index 0000000..cfe700f
--- /dev/null
+++ b/nodes/DelayNode/DelayNode.tsx
@@ -0,0 +1,50 @@
+import React from 'react'
+import { Clock } from 'lucide-react'
+import { ActionType } from '@/types/workflow'
+import { BaseNode } from '@/components/workflow/nodes/base-node'
+import { DelayNodeData } from './DelayNode.types'
+
+interface DelayNodeProps {
+ data: DelayNodeData
+ selected?: boolean
+}
+
+export function DelayNode({ data, selected }: DelayNodeProps) {
+ const displayConfig = {
+ delayType: data.config?.delayType ?? 'fixed',
+ value: data.config?.value ?? 1,
+ unit: data.config?.unit ?? 'seconds'
+ }
+
+ const delayTypeLabels = {
+ fixed: 'Fixed',
+ random: 'Random',
+ exponential: 'Exponential'
+ }
+
+ const unitLabels = {
+ milliseconds: 'ms',
+ seconds: 's',
+ minutes: 'm',
+ hours: 'h'
+ }
+
+ // Create enhanced data with description for BaseNode
+ const enhancedData = {
+ ...data,
+ description: `${delayTypeLabels[displayConfig.delayType as keyof typeof delayTypeLabels]} • ${displayConfig.value}${unitLabels[displayConfig.unit as keyof typeof unitLabels]} - Wait: ${displayConfig.value} ${displayConfig.unit} (${displayConfig.delayType})`
+ }
+
+ return (
+ }
+ color="#3b82f6"
+ />
+ )
+}
+
+// Export the node type for registration
+export const DELAY_NODE_TYPE = ActionType.DELAY
diff --git a/nodes/DelayNode/DelayNode.types.ts b/nodes/DelayNode/DelayNode.types.ts
new file mode 100644
index 0000000..d1e5e8d
--- /dev/null
+++ b/nodes/DelayNode/DelayNode.types.ts
@@ -0,0 +1,51 @@
+import { ActionNodeData, ActionType } from '@/types/workflow'
+
+export interface DelayNodeConfig extends Record {
+ delayType: 'fixed' | 'random' | 'exponential'
+ maxDelayMs?: number
+ unit: 'milliseconds' | 'seconds' | 'minutes' | 'hours'
+ value: number
+ passthrough: boolean
+}
+
+/**
+ * Helper function to compute delay in milliseconds from value and unit
+ */
+export function getDelayMs(config: { value: number; unit: string }): number {
+ const multipliers = {
+ milliseconds: 1,
+ seconds: 1000,
+ minutes: 60 * 1000,
+ hours: 60 * 60 * 1000,
+ } as const
+
+ const unitMultiplier = multipliers[config.unit as keyof typeof multipliers]
+ if (unitMultiplier === undefined) {
+ throw new Error(`Invalid unit: ${config.unit}`)
+ }
+
+ const delayMs = config.value * unitMultiplier
+ if (!Number.isFinite(delayMs)) {
+ throw new Error('Invalid computed delay value')
+ }
+
+ return delayMs
+}
+
+export interface DelayNodeData extends ActionNodeData {
+ actionType: ActionType.DELAY
+ config: DelayNodeConfig
+}
+
+export interface DelayExecutionResult {
+ delayType: string
+ actualDelayMs: number
+ plannedDelayMs: number
+ unit: string
+ startTime: string
+ endTime: string
+ passthrough: boolean
+ passthroughData?: unknown
+}
+
+export type { DelayNodeConfig as DelayConfig }
diff --git a/nodes/DelayNode/index.ts b/nodes/DelayNode/index.ts
new file mode 100644
index 0000000..3d99549
--- /dev/null
+++ b/nodes/DelayNode/index.ts
@@ -0,0 +1,4 @@
+export { DelayNode, DELAY_NODE_TYPE } from './DelayNode'
+export { executeDelayNode } from './DelayNode.service'
+export { DELAY_NODE_DEFINITION } from './DelayNode.schema'
+export type { DelayNodeConfig, DelayNodeData, DelayExecutionResult } from './DelayNode.types'
diff --git a/nodes/EmailNode/EmailNode.schema.ts b/nodes/EmailNode/EmailNode.schema.ts
index 0b1fd49..69c0580 100644
--- a/nodes/EmailNode/EmailNode.schema.ts
+++ b/nodes/EmailNode/EmailNode.schema.ts
@@ -1,29 +1,8 @@
import { NodeType, ActionType } from '@/types/workflow'
import { EmailNodeConfig } from './EmailNode.types'
+import { ParameterDefinition, NodeDefinition } from '../index'
-interface ParameterDefinition {
- path: string
- label: string
- type: 'text' | 'textarea' | 'select' | 'number' | 'boolean' | 'email' | 'url' | 'password'
- required?: boolean
- defaultValue?: unknown
- options?: Array<{ label: string; value: string }>
- placeholder?: string
- description?: string
- showIf?: Array<{ path: string; equals: string | number | boolean }>
-}
-
-interface NodeDefinition {
- nodeType: NodeType
- subType: ActionType
- label: string
- description: string
- parameters: ParameterDefinition[]
- validate: (config: Record) => string[]
- getDefaults: () => EmailNodeConfig
-}
-
-export const EMAIL_NODE_DEFINITION: NodeDefinition = {
+export const EMAIL_NODE_DEFINITION: NodeDefinition = {
nodeType: NodeType.ACTION,
subType: ActionType.EMAIL,
label: 'Send Email',
@@ -32,7 +11,7 @@ export const EMAIL_NODE_DEFINITION: NodeDefinition = {
{
path: 'to',
label: 'To',
- type: 'email',
+ type: 'stringList',
required: true,
defaultValue: [],
description: 'Email recipients',
@@ -134,7 +113,7 @@ export const EMAIL_NODE_DEFINITION: NodeDefinition = {
],
validate: (config: Record): string[] => {
const errors: string[] = []
- const typed = config as unknown as EmailNodeConfig
+ const typed = config as EmailNodeConfig
if (!Array.isArray(typed.to) || typed.to.length === 0) {
errors.push('At least one recipient (To) is required')
diff --git a/nodes/EmailNode/EmailNode.test.ts b/nodes/EmailNode/EmailNode.test.ts
index 367b702..3a8cc71 100644
--- a/nodes/EmailNode/EmailNode.test.ts
+++ b/nodes/EmailNode/EmailNode.test.ts
@@ -34,7 +34,7 @@ describe('EmailNode', () => {
from: 'sender@example.com'
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toHaveLength(0)
})
@@ -43,7 +43,7 @@ describe('EmailNode', () => {
to: []
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('At least one recipient (To) is required')
})
@@ -52,7 +52,7 @@ describe('EmailNode', () => {
subject: ''
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('Subject is required')
})
@@ -61,7 +61,7 @@ describe('EmailNode', () => {
body: ''
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('Email body is required')
})
@@ -70,7 +70,7 @@ describe('EmailNode', () => {
to: ['invalid-email']
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('Invalid email format for recipient 1: invalid-email')
})
@@ -79,7 +79,7 @@ describe('EmailNode', () => {
from: 'invalid-sender-email'
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('Invalid email format for sender: invalid-sender-email')
})
@@ -88,7 +88,7 @@ describe('EmailNode', () => {
to: ['test1@example.com', 'test2@example.com', 'invalid-email']
})
- const errors = EMAIL_NODE_DEFINITION.validate(config as Record)
+ const errors = EMAIL_NODE_DEFINITION.validate(config)
expect(errors).toContain('Invalid email format for recipient 3: invalid-email')
expect(errors).toHaveLength(1)
})
diff --git a/nodes/ManualNode/ManualNode.service.ts b/nodes/ManualNode/ManualNode.service.ts
index 32f0f38..bd8dbe9 100644
--- a/nodes/ManualNode/ManualNode.service.ts
+++ b/nodes/ManualNode/ManualNode.service.ts
@@ -22,7 +22,7 @@ export class ManualNodeService {
const result: ManualExecutionResult = {
triggered: true,
timestamp: new Date(),
- triggeredBy: context.workflowId || 'unknown',
+ triggeredBy: context.nodeId || 'unknown',
reason: 'Manual execution triggered'
}
diff --git a/nodes/ManualNode/ManualNode.test.ts b/nodes/ManualNode/ManualNode.test.ts
index 5b03a54..1ef1a7a 100644
--- a/nodes/ManualNode/ManualNode.test.ts
+++ b/nodes/ManualNode/ManualNode.test.ts
@@ -42,7 +42,7 @@ describe('ManualNode', () => {
expect(result.success).toBe(true)
const output = result.output as ManualExecutionResult
expect(output?.triggered).toBe(true)
- expect(output?.triggeredBy).toBe('workflow-1')
+ expect(output?.triggeredBy).toBe('manual-1')
})
})
diff --git a/nodes/TransformNode/TransformNode.schema.ts b/nodes/TransformNode/TransformNode.schema.ts
new file mode 100644
index 0000000..0abddff
--- /dev/null
+++ b/nodes/TransformNode/TransformNode.schema.ts
@@ -0,0 +1,121 @@
+import { NodeType, ActionType } from "@/types/workflow";
+import { TransformNodeConfig } from "./TransformNode.types";
+import { NodeDefinition, ParameterDefinition } from "../index";
+import { parse } from "espree";
+
+
+
+
+
+export const TRANSFORM_NODE_DEFINITION: NodeDefinition = {
+ nodeType: NodeType.ACTION,
+ subType: ActionType.TRANSFORM,
+ label: "Data Transform",
+ description: "Transform data using JavaScript or JSONPath (placeholder implementation)",
+ parameters: [
+ {
+ path: "operation",
+ label: "Operation",
+ type: "select",
+ required: true,
+ default: "map",
+ options: [
+ { label: "Map (Transform each item)", value: "map" },
+ { label: "Filter (Select items)", value: "filter" },
+ { label: "Reduce (Aggregate data)", value: "reduce" },
+ { label: "Sort (Order items)", value: "sort" },
+ { label: "Group (Group by key)", value: "group" },
+ { label: "Merge (Combine objects)", value: "merge" },
+ ],
+ description: "Type of data transformation to perform",
+ },
+ {
+ path: "language",
+ label: "Script Language",
+ type: "select",
+ required: true,
+ default: "javascript",
+ options: [
+ { label: "JavaScript", value: "javascript" },
+ { label: "JSONPath", value: "jsonpath" },
+ ],
+ description: "Language for transformation script",
+ },
+ {
+ path: "script",
+ label: "Transformation Script",
+ type: "textarea",
+ required: true,
+ default: "",
+ placeholder: "// For map operation:\nreturn { ...item, processed: true }",
+ description: "Script to transform the data",
+ },
+ {
+ path: "inputPath",
+ label: "Input Path",
+ type: "text",
+ required: false,
+ default: "",
+ placeholder: "data.items",
+ description: "JSONPath to extract input data (optional)",
+ },
+ {
+ path: "outputPath",
+ label: "Output Path",
+ type: "text",
+ required: false,
+ default: "",
+ placeholder: "result.transformed",
+ description: "Path to store transformed data (optional)",
+ },
+ ],
+ validate: (config: Record): string[] => {
+ const errors: string[] = [];
+ const typed = config as unknown as TransformNodeConfig;
+
+ // Validate operation
+ const validOperations = ["map", "filter", "reduce", "sort", "group", "merge"];
+ if (!typed.operation || !validOperations.includes(typed.operation)) {
+ errors.push("Valid operation is required");
+ }
+
+ // Validate language
+ const validLanguages = ["javascript", "jsonpath"];
+ if (!typed.language || !validLanguages.includes(typed.language)) {
+ errors.push("Valid script language is required");
+ }
+
+ // Validate script
+ if (!typed.script || typeof typed.script !== "string" || typed.script.trim().length === 0) {
+ errors.push("Transformation script is required");
+ }
+
+ // Safe JavaScript syntax validation using static parser
+ if (typed.language === "javascript" && typed.script) {
+ try {
+ // Parse the script using espree - this only validates syntax without execution
+ // Wrap in a function context to validate it as a function body
+ const wrappedScript = `function transform(item, index, array) {\n${typed.script}\n}`;
+ parse(wrappedScript, {
+ ecmaVersion: 2020,
+ sourceType: "script"
+ });
+ } catch (parseError: unknown) {
+ const errorMessage = parseError instanceof Error ? parseError.message : "Unknown parsing error";
+ errors.push(`Invalid JavaScript syntax in transformation script: ${errorMessage}`);
+ }
+ }
+
+ // Note: For production environments, consider additional server-side validation
+ // using a secure sandbox for extra safety when executing user scripts
+
+ return errors;
+ },
+ getDefaults: (): TransformNodeConfig => ({
+ operation: "map",
+ language: "javascript",
+ script: "",
+ inputPath: "",
+ outputPath: "",
+ }),
+};
diff --git a/nodes/TransformNode/TransformNode.service.ts b/nodes/TransformNode/TransformNode.service.ts
new file mode 100644
index 0000000..9d90542
--- /dev/null
+++ b/nodes/TransformNode/TransformNode.service.ts
@@ -0,0 +1,243 @@
+import { TransformNodeConfig, TransformExecutionResult } from './TransformNode.types'
+import { NodeExecutionContext, NodeExecutionResult } from '../types'
+
+export async function executeTransformNode(context: NodeExecutionContext): Promise {
+ const startTime = Date.now()
+
+ try {
+ const config = context.config as unknown as TransformNodeConfig
+
+ // Validate required configuration
+ if (!config.script || config.script.trim().length === 0) {
+ return {
+ success: false,
+ error: 'Transformation script is required'
+ }
+ }
+
+ // Check for abort signal
+ if (context.signal?.aborted) {
+ return {
+ success: false,
+ error: 'Execution was cancelled'
+ }
+ }
+
+ // Get input data (from previous node or context input)
+ let inputData: unknown = context.input
+ if (config.inputPath) {
+ inputData = getNestedValue(context.input, config.inputPath)
+ }
+
+ // PLACEHOLDER IMPLEMENTATION
+ // In a real implementation, this would:
+ // 1. Parse and validate the transformation script
+ // 2. Execute the script in a sandboxed environment
+ // 3. Apply the transformation based on the operation type
+ // 4. Handle different script languages (JavaScript, JSONPath)
+
+ // Simulate transformation delay
+ await new Promise(resolve => setTimeout(resolve, 50))
+
+ // Check for abort signal immediately after delay
+ if (context.signal?.aborted) {
+ return {
+ success: false,
+ error: 'Execution was cancelled'
+ }
+ }
+
+ let transformedData: unknown
+ let itemsProcessed = 0
+
+ // Mock transformation based on operation type
+ switch (config.operation) {
+ case 'map':
+ if (Array.isArray(inputData)) {
+ transformedData = inputData.map((item, index) => {
+ itemsProcessed++
+ // Mock transformation - add a "processed" flag
+ return typeof item === 'object' && item !== null
+ ? { ...(item as Record), processed: true, transformedAt: new Date().toISOString() }
+ : { value: item as unknown, processed: true, transformedAt: new Date().toISOString() }
+ })
+ } else {
+ transformedData = typeof inputData === 'object' && inputData !== null
+ ? { ...inputData as Record, processed: true, transformedAt: new Date().toISOString() }
+ : { value: inputData, processed: true, transformedAt: new Date().toISOString() }
+ itemsProcessed = 1
+ }
+ break
+
+ case 'filter':
+ if (Array.isArray(inputData)) {
+ // Mock filter - keep only truthy items or items with specific properties
+ transformedData = inputData.filter((item, index) => {
+ itemsProcessed++
+ return item && (typeof item !== 'object' || Object.keys(item as object).length > 0)
+ })
+ } else {
+ transformedData = inputData ? [inputData] : []
+ itemsProcessed = 1
+ }
+ break
+
+ case 'reduce':
+ if (Array.isArray(inputData)) {
+ itemsProcessed = inputData.length
+ transformedData = {
+ count: inputData.length,
+ summary: 'Mock aggregation result',
+ firstItem: (inputData[0] as unknown) ?? null,
+ lastItem: (inputData[inputData.length - 1] as unknown) ?? null
+ }
+ } else {
+ transformedData = { count: 1, value: inputData }
+ itemsProcessed = 1
+ }
+ break
+
+ case 'sort':
+ if (Array.isArray(inputData)) {
+ itemsProcessed = inputData.length
+ // Mock sort - reverse the array
+ transformedData = [...(inputData as unknown[])].reverse()
+ } else {
+ transformedData = [inputData]
+ itemsProcessed = 1
+ }
+ break
+
+ case 'group':
+ if (Array.isArray(inputData)) {
+ itemsProcessed = inputData.length
+ // Mock grouping by type
+ const groups: Record = {}
+ inputData.forEach(item => {
+ const type = typeof item
+ if (!groups[type]) groups[type] = []
+ groups[type].push(item)
+ })
+ transformedData = groups
+ } else {
+ transformedData = { [typeof inputData]: [inputData] }
+ itemsProcessed = 1
+ }
+ break
+
+ case 'merge':
+ if (Array.isArray(inputData)) {
+ itemsProcessed = inputData.length
+ // Mock merge - combine all objects
+ transformedData = inputData.reduce((acc: Record, item) => {
+ if (typeof item === 'object' && item !== null) {
+ return { ...acc, ...(item as Record) }
+ }
+ return acc
+ }, {} as Record)
+ } else {
+ transformedData = inputData
+ itemsProcessed = 1
+ }
+ break
+
+ default:
+ return {
+ success: false,
+ error: `Unsupported operation: ${config.operation}`
+ }
+ }
+
+ // Apply output path if specified
+ let finalOutput = transformedData
+ if (config.outputPath) {
+ const outputContainer = {}
+ setNestedValue(outputContainer, config.outputPath, transformedData)
+ finalOutput = outputContainer
+ }
+
+ // Check for abort signal before building result
+ if (context.signal?.aborted) {
+ return {
+ success: false,
+ error: 'Execution was cancelled'
+ }
+ }
+
+ // Calculate final duration after all transformation work is complete
+ const duration = Date.now() - startTime
+
+ const result: TransformExecutionResult = {
+ operation: config.operation,
+ originalData: inputData,
+ transformedData: finalOutput,
+ duration,
+ itemsProcessed
+ }
+
+ return {
+ success: true,
+ output: result
+ }
+
+ } catch (error) {
+ const duration = Date.now() - startTime
+
+ if (error instanceof Error) {
+ // Handle specific error types
+ if (error.name === 'AbortError') {
+ return {
+ success: false,
+ error: 'Transform operation was cancelled'
+ }
+ }
+
+ return {
+ success: false,
+ error: error.message
+ }
+ }
+
+ return {
+ success: false,
+ error: 'Unknown error occurred during data transformation'
+ }
+ }
+}
+
+// Helper function to get nested values
+function getNestedValue(obj: unknown, path: string): unknown {
+ if (!path) return obj
+
+ return path.split('.').reduce((acc: unknown, part: string) => {
+ if (acc && typeof acc === 'object') {
+ return (acc as Record)[part]
+ }
+ return undefined
+ }, obj)
+}
+
+// Helper function to set nested values
+function setNestedValue(obj: Record, path: string, value: unknown): void {
+ if (!path) return
+
+ // Prevent prototype pollution
+ const dangerousKeys = ['__proto__', 'constructor', 'prototype']
+ const parts = path.split('.')
+
+ if (parts.some(part => dangerousKeys.includes(part))) {
+ throw new Error('Dangerous path detected')
+ }
+
+ let current: Record = obj
+
+ for (let i = 0; i < parts.length - 1; i++) {
+ const part = parts[i]
+ if (!current[part] || typeof current[part] !== 'object') {
+ current[part] = {}
+ }
+ current = current[part] as Record
+ }
+
+ current[parts[parts.length - 1]] = value
+}
\ No newline at end of file
diff --git a/nodes/TransformNode/TransformNode.test.ts b/nodes/TransformNode/TransformNode.test.ts
new file mode 100644
index 0000000..1321db7
--- /dev/null
+++ b/nodes/TransformNode/TransformNode.test.ts
@@ -0,0 +1,353 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest'
+import { executeTransformNode } from './TransformNode.service'
+import { TRANSFORM_NODE_DEFINITION } from './TransformNode.schema'
+import { TransformNodeConfig } from './TransformNode.types'
+import { NodeExecutionContext } from '../types'
+
+describe('TransformNode', () => {
+ describe('Schema and Validation', () => {
+ it('should have correct node definition structure', () => {
+ expect(TRANSFORM_NODE_DEFINITION.nodeType).toBe('action')
+ expect(TRANSFORM_NODE_DEFINITION.subType).toBe('transform')
+ expect(TRANSFORM_NODE_DEFINITION.label).toBe('Data Transform')
+ expect(TRANSFORM_NODE_DEFINITION.parameters).toHaveLength(5)
+ })
+
+ it('should validate required fields', () => {
+ const invalidConfigs = [
+ {}, // empty config
+ { operation: 'map' }, // missing script and language
+ { language: 'javascript' }, // missing operation and script
+ { operation: 'map', language: 'javascript', script: '' }, // empty script
+ ]
+
+ invalidConfigs.forEach(config => {
+ const errors = TRANSFORM_NODE_DEFINITION.validate(config)
+ expect(errors.length).toBeGreaterThan(0)
+ })
+ })
+
+ it('should validate operation types', () => {
+ const config = {
+ operation: 'invalid',
+ language: 'javascript',
+ script: 'return item'
+ }
+
+ const errors = TRANSFORM_NODE_DEFINITION.validate(config)
+ expect(errors).toContain('Valid operation is required')
+ })
+
+ it('should validate language types', () => {
+ const config = {
+ operation: 'map',
+ language: 'python',
+ script: 'return item'
+ }
+
+ const errors = TRANSFORM_NODE_DEFINITION.validate(config)
+ expect(errors).toContain('Valid script language is required')
+ })
+
+ it('should validate JavaScript syntax', () => {
+ const config = {
+ operation: 'map',
+ language: 'javascript',
+ script: 'invalid javascript syntax {'
+ }
+
+ const errors = TRANSFORM_NODE_DEFINITION.validate(config)
+ expect(errors.some(error => error.startsWith('Invalid JavaScript syntax in transformation script'))).toBe(true)
+ })
+
+ it('should pass validation with valid config', () => {
+ const config = {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return { ...item, processed: true }'
+ }
+
+ const errors = TRANSFORM_NODE_DEFINITION.validate(config)
+ expect(errors).toHaveLength(0)
+ })
+
+ it('should provide correct defaults', () => {
+ const defaults = TRANSFORM_NODE_DEFINITION.getDefaults()
+ expect(defaults).toEqual({
+ operation: 'map',
+ language: 'javascript',
+ script: '',
+ inputPath: '',
+ outputPath: ''
+ })
+ })
+ })
+
+ describe('Transform Execution', () => {
+ let mockContext: NodeExecutionContext
+
+ beforeEach(() => {
+ mockContext = {
+ nodeId: 'test-node',
+ workflowId: 'test-workflow',
+ executionId: 'test-execution',
+ config: {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return { ...item, processed: true }'
+ } as TransformNodeConfig & Record & Record,
+ input: [{ id: 1, name: 'Item 1' }, { id: 2, name: 'Item 2' }],
+ previousNodes: []
+ }
+ })
+
+ it('should execute MAP operation successfully', async () => {
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'map',
+ originalData: expect.arrayContaining([
+ expect.objectContaining({ id: 1, name: 'Item 1' })
+ ]) as unknown[],
+ transformedData: expect.arrayContaining([
+ expect.objectContaining({ id: 1, name: 'Item 1', processed: true })
+ ]) as unknown[],
+ itemsProcessed: 2
+ })
+ expect(typeof (result.output as Record).duration).toBe('number')
+ })
+
+ it('should execute FILTER operation successfully', async () => {
+ mockContext.config = {
+ operation: 'filter',
+ language: 'javascript',
+ script: 'return item.id > 1'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'filter',
+ originalData: [{ id: 1, name: 'Item 1' }, { id: 2, name: 'Item 2' }],
+ itemsProcessed: 2
+ })
+
+ // Assert the exact filtered content (mock filter keeps truthy objects with properties)
+ const output = result.output as { transformedData: unknown[], itemsProcessed: number, operation: string }
+ expect(Array.isArray(output.transformedData)).toBe(true)
+ expect(output.transformedData).toHaveLength(2) // Both items pass the mock filter
+ expect(output.transformedData).toEqual([
+ { id: 1, name: 'Item 1' },
+ { id: 2, name: 'Item 2' }
+ ])
+ expect(output.itemsProcessed).toBe(2)
+ expect(output.operation).toBe('filter')
+ })
+
+ it('should execute REDUCE operation successfully', async () => {
+ mockContext.config = {
+ operation: 'reduce',
+ language: 'javascript',
+ script: 'return acc + item.id'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'reduce',
+ transformedData: expect.objectContaining({
+ count: 2,
+ summary: expect.stringMatching(/.*/) as string
+ }) as unknown,
+ itemsProcessed: 2
+ })
+ })
+
+ it('should execute SORT operation successfully', async () => {
+ mockContext.config = {
+ operation: 'sort',
+ language: 'javascript',
+ script: 'return a.id - b.id'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'sort',
+ originalData: [{ id: 1, name: 'Item 1' }, { id: 2, name: 'Item 2' }],
+ itemsProcessed: 2
+ })
+
+ // Assert the exact sorted content (mock sort reverses the array)
+ const output = result.output as { transformedData: unknown[], itemsProcessed: number, operation: string }
+ expect(Array.isArray(output.transformedData)).toBe(true)
+ expect(output.transformedData).toHaveLength(2)
+ expect(output.transformedData).toEqual([
+ { id: 2, name: 'Item 2' }, // Reversed order
+ { id: 1, name: 'Item 1' }
+ ])
+ expect(output.itemsProcessed).toBe(2)
+ expect(output.operation).toBe('sort')
+ })
+
+ it('should execute GROUP operation successfully', async () => {
+ // Update mock input to include category values for proper grouping
+ mockContext.input = [
+ { id: 1, name: 'Item 1', category: 'electronics' },
+ { id: 2, name: 'Item 2', category: 'books' },
+ { id: 3, name: 'Item 3', category: 'electronics' },
+ { id: 4, name: 'Item 4', category: 'books' }
+ ]
+
+ mockContext.config = {
+ operation: 'group',
+ language: 'javascript',
+ script: 'return item.category'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'group',
+ originalData: [
+ { id: 1, name: 'Item 1', category: 'electronics' },
+ { id: 2, name: 'Item 2', category: 'books' },
+ { id: 3, name: 'Item 3', category: 'electronics' },
+ { id: 4, name: 'Item 4', category: 'books' }
+ ],
+ itemsProcessed: 4
+ })
+
+ // Verify proper grouping semantics (mock groups by typeof, not by script)
+ const output = result.output as { transformedData: Record, itemsProcessed: number, operation: string }
+ expect(typeof output.transformedData).toBe('object')
+ expect(output.transformedData).toHaveProperty('object') // All items are objects
+ expect(Array.isArray(output.transformedData.object)).toBe(true)
+ expect(output.transformedData.object).toHaveLength(4) // All 4 items grouped under 'object'
+ expect(output.transformedData.object).toEqual([
+ { id: 1, name: 'Item 1', category: 'electronics' },
+ { id: 2, name: 'Item 2', category: 'books' },
+ { id: 3, name: 'Item 3', category: 'electronics' },
+ { id: 4, name: 'Item 4', category: 'books' }
+ ])
+ expect(output.itemsProcessed).toBe(4)
+ expect(output.operation).toBe('group')
+ })
+
+ it('should execute MERGE operation successfully', async () => {
+ mockContext.input = [{ a: 1 }, { b: 2 }, { c: 3 }]
+ mockContext.config = {
+ operation: 'merge',
+ language: 'javascript',
+ script: 'return item'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'merge',
+ transformedData: expect.objectContaining({ a: 1, b: 2, c: 3 }) as unknown,
+ itemsProcessed: 3
+ })
+ })
+
+ it('should handle non-array input data', async () => {
+ mockContext.input = { id: 1, name: 'Single Item' }
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ operation: 'map',
+ itemsProcessed: 1
+ })
+ })
+
+ it('should handle inputPath configuration', async () => {
+ mockContext.input = { data: { items: [{ id: 1 }] } }
+ mockContext.config = {
+ ...(mockContext.config as unknown as TransformNodeConfig),
+ inputPath: 'data.items'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ originalData: [{ id: 1 }]
+ })
+ })
+
+ it('should handle outputPath configuration', async () => {
+ mockContext.config = {
+ ...(mockContext.config as unknown as TransformNodeConfig),
+ outputPath: 'result.transformed'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(true)
+ expect(result.output).toMatchObject({
+ transformedData: expect.objectContaining({
+ result: expect.objectContaining({
+ transformed: expect.any(Array) as unknown[]
+ }) as unknown
+ }) as unknown
+ })
+
+ // Verify the array contains the expected transformed elements
+ const output = result.output as { transformedData: { result: { transformed: unknown[] } } }
+ expect(Array.isArray(output.transformedData.result.transformed)).toBe(true)
+ expect(output.transformedData.result.transformed).toHaveLength(2)
+ expect(output.transformedData.result.transformed).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({ id: 1, name: 'Item 1', processed: true }),
+ expect.objectContaining({ id: 2, name: 'Item 2', processed: true })
+ ])
+ )
+ })
+
+ it('should handle missing script', async () => {
+ mockContext.config = {
+ operation: 'map',
+ language: 'javascript',
+ script: ''
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Transformation script is required')
+ })
+
+ it('should handle abort signal', async () => {
+ const abortController = new AbortController()
+ mockContext.signal = abortController.signal
+ abortController.abort()
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Execution was cancelled')
+ })
+
+ it('should handle unsupported operation', async () => {
+ mockContext.config = {
+ operation: 'invalid' as TransformNodeConfig['operation'],
+ language: 'javascript',
+ script: 'return item'
+ } as TransformNodeConfig & Record
+
+ const result = await executeTransformNode(mockContext)
+
+ expect(result.success).toBe(false)
+ expect(result.error).toBe('Unsupported operation: invalid')
+ })
+ })
+})
diff --git a/nodes/TransformNode/TransformNode.tsx b/nodes/TransformNode/TransformNode.tsx
new file mode 100644
index 0000000..8bea822
--- /dev/null
+++ b/nodes/TransformNode/TransformNode.tsx
@@ -0,0 +1,45 @@
+import React from 'react'
+import { Shuffle } from 'lucide-react'
+import { ActionType } from '@/types/workflow'
+import { BaseNode } from '@/components/workflow/nodes/base-node'
+import { TransformNodeData } from './TransformNode.types'
+
+interface TransformNodeProps {
+ data: TransformNodeData
+ selected?: boolean
+}
+
+export function TransformNode({ data, selected }: TransformNodeProps) {
+ const displayConfig = {
+ operation: data.config?.operation || 'map',
+ language: data.config?.language || 'javascript'
+ }
+
+ const operationLabels = {
+ map: 'Map',
+ filter: 'Filter',
+ reduce: 'Reduce',
+ sort: 'Sort',
+ group: 'Group',
+ merge: 'Merge'
+ }
+
+ // Create enhanced data with description for BaseNode
+ const enhancedData = {
+ ...data,
+ description: `${operationLabels[displayConfig.operation as keyof typeof operationLabels]} • ${displayConfig.language} - Script: ${data.config?.script?.substring(0, 30) || 'Not configured'}${(data.config?.script?.length || 0) > 30 ? '...' : ''}`
+ }
+
+ return (
+ }
+ color="#f97316"
+ />
+ )
+}
+
+// Export the node type for registration
+export const TRANSFORM_NODE_TYPE = ActionType.TRANSFORM
diff --git a/nodes/TransformNode/TransformNode.types.ts b/nodes/TransformNode/TransformNode.types.ts
new file mode 100644
index 0000000..69e29f3
--- /dev/null
+++ b/nodes/TransformNode/TransformNode.types.ts
@@ -0,0 +1,25 @@
+import { ActionNodeData, ActionType } from '@/types/workflow'
+
+export interface TransformNodeConfig extends Record {
+ operation: 'map' | 'filter' | 'reduce' | 'sort' | 'group' | 'merge'
+ script: string
+ language: 'javascript' | 'jsonpath'
+ inputPath?: string
+ outputPath?: string
+ options?: Record
+}
+
+export interface TransformNodeData extends ActionNodeData {
+ actionType: ActionType.TRANSFORM
+ config: TransformNodeConfig
+}
+
+export interface TransformExecutionResult {
+ operation: string
+ originalData: unknown
+ transformedData: unknown
+ duration: number
+ itemsProcessed: number
+}
+
+export type { TransformNodeConfig as TransformConfig }
diff --git a/nodes/TransformNode/index.ts b/nodes/TransformNode/index.ts
new file mode 100644
index 0000000..ba4d0d3
--- /dev/null
+++ b/nodes/TransformNode/index.ts
@@ -0,0 +1,4 @@
+export { TransformNode, TRANSFORM_NODE_TYPE } from './TransformNode'
+export { executeTransformNode } from './TransformNode.service'
+export { TRANSFORM_NODE_DEFINITION } from './TransformNode.schema'
+export type { TransformNodeConfig, TransformNodeData, TransformExecutionResult } from './TransformNode.types'
diff --git a/nodes/index.ts b/nodes/index.ts
index edd0426..161d983 100644
--- a/nodes/index.ts
+++ b/nodes/index.ts
@@ -1,5 +1,6 @@
// Central node registry and exports
import { NodeType } from '../types/workflow'
+import { CredentialType } from '../types/credentials'
import type { NodeExecutionContext, NodeExecutionResult } from './types'
// Import all nodes
@@ -10,20 +11,37 @@ export * from './WebhookNode'
export * from './ManualNode'
export * from './IfNode'
export * from './FilterNode'
+export * from './DatabaseNode'
+export * from './TransformNode'
+export * from './DelayNode'
// Base interfaces for all nodes
export type { NodeExecutionContext, NodeExecutionResult } from './types'
-export interface ParameterDefinition {
- name: string
+// Discriminated union to require exactly one of 'path' or 'name'
+export type ParameterAddress =
+ | { path: string; name?: never }
+ | { name: string; path?: never }
+
+// Discriminated union for showIf conditions to require exactly one of 'path' or 'name'
+export type ShowIfCondition =
+ | { path: string; name?: never; equals: string | number | boolean }
+ | { name: string; path?: never; equals: string | number | boolean }
+
+export type ParameterDefinition = ParameterAddress & {
label: string
- type: 'text' | 'textarea' | 'select' | 'number' | 'boolean' | 'email' | 'url' | 'json' | 'password'
+ type: 'string' | 'text' | 'textarea' | 'select' | 'number' | 'boolean' | 'email' | 'url' | 'json' | 'password' | 'credential' | 'stringList'
required?: boolean
+ // Default value for this parameter
+ default?: unknown
+ // Legacy support for defaultValue
defaultValue?: unknown
- options?: Array<{ label: string; value: string }>
+ options?: Array<{ label: string; value: string }> | (() => Array<{ label: string; value: string }>)
placeholder?: string
description?: string
- showIf?: Array<{ path: string; equals: string | number | boolean }>
+ showIf?: ShowIfCondition[]
+ // For credential type parameters
+ credentialType?: CredentialType
}
import type { ReactNode } from 'react'
@@ -43,7 +61,7 @@ export interface NodeDefinition> {
parameters: ParameterDefinition[]
// Validation
- validate: (config: TConfig) => string[]
+ validate: (config: Record) => string[]
// Defaults
getDefaults: () => TConfig
@@ -55,9 +73,49 @@ export interface NodeDefinition> {
// Node registry for dynamic discovery
export const NODE_REGISTRY: Map = new Map()
+// Runtime validation helpers
+function validateParameterAddress(param: ParameterDefinition, paramIndex: number): void {
+ const hasPath = 'path' in param && param.path !== undefined && param.path !== null
+ const hasName = 'name' in param && param.name !== undefined && param.name !== null
+
+ if (!hasPath && !hasName) {
+ throw new Error(`Parameter at index ${paramIndex} must have either 'path' or 'name' defined, but has neither`)
+ }
+
+ if (hasPath && hasName) {
+ throw new Error(`Parameter at index ${paramIndex} cannot have both 'path' and 'name' defined, must have exactly one`)
+ }
+}
+
+function validateShowIfCondition(condition: ShowIfCondition, paramIndex: number, conditionIndex: number): void {
+ const hasPath = 'path' in condition && condition.path !== undefined && condition.path !== null
+ const hasName = 'name' in condition && condition.name !== undefined && condition.name !== null
+
+ if (!hasPath && !hasName) {
+ throw new Error(`ShowIf condition at index ${conditionIndex} for parameter at index ${paramIndex} must have either 'path' or 'name' defined, but has neither`)
+ }
+
+ if (hasPath && hasName) {
+ throw new Error(`ShowIf condition at index ${conditionIndex} for parameter at index ${paramIndex} cannot have both 'path' and 'name' defined, must have exactly one`)
+ }
+}
+
// Utility functions for node registry management
export function registerNode(definition: NodeDefinition): void {
const key = `${definition.nodeType}-${definition.subType}`
+
+ // Runtime validation of parameter definitions
+ definition.parameters.forEach((param, paramIndex) => {
+ validateParameterAddress(param, paramIndex)
+
+ // Validate showIf conditions if present
+ if (param.showIf) {
+ param.showIf.forEach((condition, conditionIndex) => {
+ validateShowIfCondition(condition, paramIndex, conditionIndex)
+ })
+ }
+ })
+
if (NODE_REGISTRY.has(key)) {
console.warn(`Warning: Overwriting existing node definition for key "${key}"`)
}
@@ -104,15 +162,21 @@ import { WEBHOOK_NODE_DEFINITION } from './WebhookNode'
import { MANUAL_NODE_DEFINITION } from './ManualNode'
import { IF_NODE_DEFINITION } from './IfNode'
import { FILTER_NODE_DEFINITION } from './FilterNode'
+import { DATABASE_NODE_DEFINITION } from './DatabaseNode'
+import { TRANSFORM_NODE_DEFINITION } from './TransformNode'
+import { DELAY_NODE_DEFINITION } from './DelayNode'
// Register all nodes on module load
-// EMAIL_NODE_DEFINITION is handled directly in findNodeDefinition for now
+registerNode(EMAIL_NODE_DEFINITION)
registerNode(HTTP_NODE_DEFINITION)
registerNode(SCHEDULE_NODE_DEFINITION)
registerNode(WEBHOOK_NODE_DEFINITION)
registerNode(MANUAL_NODE_DEFINITION)
registerNode(IF_NODE_DEFINITION)
registerNode(FILTER_NODE_DEFINITION)
+registerNode(DATABASE_NODE_DEFINITION)
+registerNode(TRANSFORM_NODE_DEFINITION)
+registerNode(DELAY_NODE_DEFINITION)
// Export types for external use
export type { NodeType } from '../types/workflow'
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 6b4e19e..43fb8c8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -21,6 +21,7 @@
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"crypto-js": "^4.2.0",
+ "espree": "^10.4.0",
"lucide-react": "^0.370.0",
"motion": "^12.23.12",
"next": "^15.1.3",
@@ -35,6 +36,7 @@
"zustand": "^4.5.3"
},
"devDependencies": {
+ "@types/espree": "^10.1.0",
"@types/node": "^22.7.4",
"@types/react": "^19.0.2",
"@types/react-dom": "^19.0.2",
@@ -621,6 +623,37 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/@eslint/eslintrc/node_modules/eslint-visitor-keys": {
+ "version": "3.4.3",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
+ "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/@eslint/eslintrc/node_modules/espree": {
+ "version": "9.6.1",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
+ "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
+ "dev": true,
+ "license": "BSD-2-Clause",
+ "dependencies": {
+ "acorn": "^8.9.0",
+ "acorn-jsx": "^5.3.2",
+ "eslint-visitor-keys": "^3.4.1"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@eslint/eslintrc/node_modules/strip-json-comments": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
@@ -3074,6 +3107,17 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@types/espree": {
+ "version": "10.1.0",
+ "resolved": "https://registry.npmjs.org/@types/espree/-/espree-10.1.0.tgz",
+ "integrity": "sha512-uPQZdoUWWMuO6WS8/dwX1stZH/vOBa/wAniGnYEFI0IuU9RmLx6PLmo+VGfNOlbRc5I7hBsQc8H0zcdVI37kxg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "acorn": "^8.12.0",
+ "eslint-visitor-keys": "^4.0.0"
+ }
+ },
"node_modules/@types/estree": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
@@ -3838,7 +3882,6 @@
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
- "dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
@@ -3851,7 +3894,6 @@
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
- "dev": true,
"license": "MIT",
"peerDependencies": {
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
@@ -5673,7 +5715,6 @@
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
- "dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -5718,7 +5759,7 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/espree": {
+ "node_modules/eslint/node_modules/espree": {
"version": "9.6.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
"integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
@@ -5736,14 +5777,18 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/espree/node_modules/eslint-visitor-keys": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
- "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
- "dev": true,
- "license": "Apache-2.0",
+ "node_modules/espree": {
+ "version": "10.4.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz",
+ "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==",
+ "license": "BSD-2-Clause",
+ "dependencies": {
+ "acorn": "^8.15.0",
+ "acorn-jsx": "^5.3.2",
+ "eslint-visitor-keys": "^4.2.1"
+ },
"engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"url": "https://opencollective.com/eslint"
diff --git a/package.json b/package.json
index 9608a3f..02d3208 100644
--- a/package.json
+++ b/package.json
@@ -35,6 +35,7 @@
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"crypto-js": "^4.2.0",
+ "espree": "^10.4.0",
"lucide-react": "^0.370.0",
"motion": "^12.23.12",
"next": "^15.1.3",
@@ -49,6 +50,7 @@
"zustand": "^4.5.3"
},
"devDependencies": {
+ "@types/espree": "^10.1.0",
"@types/node": "^22.7.4",
"@types/react": "^19.0.2",
"@types/react-dom": "^19.0.2",
diff --git a/server/services/workflow-executor.ts b/server/services/workflow-executor.ts
index 597fa09..f1ddd79 100644
--- a/server/services/workflow-executor.ts
+++ b/server/services/workflow-executor.ts
@@ -21,6 +21,9 @@ import { executeHttpNode } from '@/nodes/HttpNode'
import { executeManualNode } from '@/nodes/ManualNode'
import { executeIfNode } from '@/nodes/IfNode'
import { executeFilterNode } from '@/nodes/FilterNode'
+import { executeDatabaseNode } from '@/nodes/DatabaseNode'
+import { executeTransformNode } from '@/nodes/TransformNode'
+import { executeDelayNode } from '@/nodes/DelayNode'
import { WebhookNodeService } from '@/nodes/WebhookNode/WebhookNode.service'
import { NodeExecutionContext } from '@/nodes/types'
@@ -188,17 +191,17 @@ export class WorkflowExecutor {
// Execute based on node type
switch (node.data.nodeType) {
case NodeType.TRIGGER:
- return await this.executeTriggerNode(node)
+ return await this.executeTriggerNode(node, signal)
case NodeType.ACTION:
return await this.executeActionNode(node, signal)
case NodeType.LOGIC:
- return await this.executeLogicNode(node)
+ return await this.executeLogicNode(node, signal)
default:
throw new Error('Unknown node type')
}
}
- private async executeTriggerNode(node: WorkflowNode): Promise {
+ private async executeTriggerNode(node: WorkflowNode, signal?: AbortSignal): Promise {
const { triggerType, config } = node.data as { triggerType: TriggerType; config: unknown }
const context: NodeExecutionContext = {
@@ -207,7 +210,8 @@ export class WorkflowExecutor {
config: config as Record,
input: this.getPreviousNodeOutput(node) || {},
previousNodes: this.getPreviousNodes(node),
- executionId: this.execution.id
+ executionId: this.execution.id,
+ signal
}
switch (triggerType) {
@@ -253,7 +257,8 @@ export class WorkflowExecutor {
config: config as Record,
input: this.getPreviousNodeOutput(node) || {},
previousNodes: this.getPreviousNodes(node),
- executionId: this.execution.id
+ executionId: this.execution.id,
+ signal
}
switch (actionType) {
@@ -273,18 +278,28 @@ export class WorkflowExecutor {
return result.output
}
- case ActionType.DATABASE:
- // Mock database query
- return { rows: [], affected: 0 }
+ case ActionType.DATABASE: {
+ const result = await executeDatabaseNode(context)
+ if (!result.success) {
+ throw new Error(result.error || 'Database execution failed')
+ }
+ return result.output
+ }
- case ActionType.TRANSFORM:
- // Mock data transformation
- return { transformed: this.getPreviousNodeOutput(node) }
+ case ActionType.TRANSFORM: {
+ const result = await executeTransformNode(context)
+ if (!result.success) {
+ throw new Error(result.error || 'Transform execution failed')
+ }
+ return result.output
+ }
case ActionType.DELAY: {
- const delayMs = (config as { delayMs?: number }).delayMs || 1000
- await new Promise(resolve => setTimeout(resolve, delayMs))
- return { delayed: delayMs }
+ const result = await executeDelayNode(context)
+ if (!result.success) {
+ throw new Error(result.error || 'Delay execution failed')
+ }
+ return result.output
}
default:
@@ -292,7 +307,7 @@ export class WorkflowExecutor {
}
}
- private async executeLogicNode(node: WorkflowNode): Promise {
+ private async executeLogicNode(node: WorkflowNode, signal?: AbortSignal): Promise {
const { logicType, config } = node.data as { logicType: LogicType; config: unknown }
const context: NodeExecutionContext = {
@@ -301,7 +316,8 @@ export class WorkflowExecutor {
config: config as Record,
input: this.getPreviousNodeOutput(node) || {},
previousNodes: this.getPreviousNodes(node),
- executionId: this.execution.id
+ executionId: this.execution.id,
+ signal
}
switch (logicType) {
diff --git a/tests/integration/node-import-consistency.test.ts b/tests/integration/node-import-consistency.test.ts
new file mode 100644
index 0000000..f4d7278
--- /dev/null
+++ b/tests/integration/node-import-consistency.test.ts
@@ -0,0 +1,215 @@
+import { describe, it, expect } from 'vitest'
+import { NodeType, ActionType, TriggerType, LogicType } from '@/types/workflow'
+
+describe('Node Import Consistency', () => {
+ describe('Individual Node Imports', () => {
+ it('should import EmailNode components correctly', async () => {
+ const emailModule = await import('@/nodes/EmailNode')
+
+ expect(emailModule.EMAIL_NODE_DEFINITION).toBeDefined()
+ expect(emailModule.executeEmailNode).toBeDefined()
+ expect(emailModule.EmailNode).toBeDefined()
+ expect(typeof emailModule.executeEmailNode).toBe('function')
+
+ // Check node definition structure
+ expect(emailModule.EMAIL_NODE_DEFINITION.nodeType).toBe(NodeType.ACTION)
+ expect(emailModule.EMAIL_NODE_DEFINITION.subType).toBe(ActionType.EMAIL)
+ })
+
+ it('should import HttpNode components correctly', async () => {
+ const httpModule = await import('@/nodes/HttpNode')
+
+ expect(httpModule.HTTP_NODE_DEFINITION).toBeDefined()
+ expect(httpModule.executeHttpNode).toBeDefined()
+ expect(httpModule.HttpNode).toBeDefined()
+ expect(typeof httpModule.executeHttpNode).toBe('function')
+
+ expect(httpModule.HTTP_NODE_DEFINITION.nodeType).toBe(NodeType.ACTION)
+ expect(httpModule.HTTP_NODE_DEFINITION.subType).toBe(ActionType.HTTP)
+ })
+
+ it('should import ScheduleNode components correctly', async () => {
+ const scheduleModule = await import('@/nodes/ScheduleNode')
+
+ expect(scheduleModule.SCHEDULE_NODE_DEFINITION).toBeDefined()
+ expect(scheduleModule.ScheduleNodeService).toBeDefined()
+ expect(scheduleModule.ScheduleNode).toBeDefined()
+
+ expect(scheduleModule.SCHEDULE_NODE_DEFINITION.nodeType).toBe(NodeType.TRIGGER)
+ expect(scheduleModule.SCHEDULE_NODE_DEFINITION.subType).toBe(TriggerType.SCHEDULE)
+ })
+
+ it('should import WebhookNode components correctly', async () => {
+ const webhookModule = await import('@/nodes/WebhookNode')
+
+ expect(webhookModule.WEBHOOK_NODE_DEFINITION).toBeDefined()
+ expect(webhookModule.WebhookNodeService).toBeDefined()
+ expect(webhookModule.WebhookNode).toBeDefined()
+
+ expect(webhookModule.WEBHOOK_NODE_DEFINITION.nodeType).toBe(NodeType.TRIGGER)
+ expect(webhookModule.WEBHOOK_NODE_DEFINITION.subType).toBe(TriggerType.WEBHOOK)
+ })
+
+ it('should import ManualNode components correctly', async () => {
+ const manualModule = await import('@/nodes/ManualNode')
+
+ expect(manualModule.MANUAL_NODE_DEFINITION).toBeDefined()
+ expect(manualModule.executeManualNode).toBeDefined()
+ expect(manualModule.ManualNode).toBeDefined()
+ expect(typeof manualModule.executeManualNode).toBe('function')
+
+ expect(manualModule.MANUAL_NODE_DEFINITION.nodeType).toBe(NodeType.TRIGGER)
+ expect(manualModule.MANUAL_NODE_DEFINITION.subType).toBe(TriggerType.MANUAL)
+ })
+
+ it('should import IfNode components correctly', async () => {
+ const ifModule = await import('@/nodes/IfNode')
+
+ expect(ifModule.IF_NODE_DEFINITION).toBeDefined()
+ expect(ifModule.executeIfNode).toBeDefined()
+ expect(ifModule.IfNode).toBeDefined()
+ expect(typeof ifModule.executeIfNode).toBe('function')
+
+ expect(ifModule.IF_NODE_DEFINITION.nodeType).toBe(NodeType.LOGIC)
+ expect(ifModule.IF_NODE_DEFINITION.subType).toBe(LogicType.IF)
+ })
+
+ it('should import FilterNode components correctly', async () => {
+ const filterModule = await import('@/nodes/FilterNode')
+
+ expect(filterModule.FILTER_NODE_DEFINITION).toBeDefined()
+ expect(filterModule.executeFilterNode).toBeDefined()
+ expect(filterModule.FilterNode).toBeDefined()
+ expect(typeof filterModule.executeFilterNode).toBe('function')
+
+ expect(filterModule.FILTER_NODE_DEFINITION.nodeType).toBe(NodeType.LOGIC)
+ expect(filterModule.FILTER_NODE_DEFINITION.subType).toBe(LogicType.FILTER)
+ })
+
+ it('should import DatabaseNode components correctly', async () => {
+ const databaseModule = await import('@/nodes/DatabaseNode')
+
+ expect(databaseModule.DATABASE_NODE_DEFINITION).toBeDefined()
+ expect(databaseModule.executeDatabaseNode).toBeDefined()
+ expect(databaseModule.DatabaseNode).toBeDefined()
+ expect(typeof databaseModule.executeDatabaseNode).toBe('function')
+
+ expect(databaseModule.DATABASE_NODE_DEFINITION.nodeType).toBe(NodeType.ACTION)
+ expect(databaseModule.DATABASE_NODE_DEFINITION.subType).toBe(ActionType.DATABASE)
+ })
+
+ it('should import TransformNode components correctly', async () => {
+ const transformModule = await import('@/nodes/TransformNode')
+
+ expect(transformModule.TRANSFORM_NODE_DEFINITION).toBeDefined()
+ expect(transformModule.executeTransformNode).toBeDefined()
+ expect(transformModule.TransformNode).toBeDefined()
+ expect(typeof transformModule.executeTransformNode).toBe('function')
+
+ expect(transformModule.TRANSFORM_NODE_DEFINITION.nodeType).toBe(NodeType.ACTION)
+ expect(transformModule.TRANSFORM_NODE_DEFINITION.subType).toBe(ActionType.TRANSFORM)
+ })
+
+ it('should import DelayNode components correctly', async () => {
+ const delayModule = await import('@/nodes/DelayNode')
+
+ expect(delayModule.DELAY_NODE_DEFINITION).toBeDefined()
+ expect(delayModule.executeDelayNode).toBeDefined()
+ expect(delayModule.DelayNode).toBeDefined()
+ expect(typeof delayModule.executeDelayNode).toBe('function')
+
+ expect(delayModule.DELAY_NODE_DEFINITION.nodeType).toBe(NodeType.ACTION)
+ expect(delayModule.DELAY_NODE_DEFINITION.subType).toBe(ActionType.DELAY)
+ })
+ })
+
+ describe('Global Node Registry Import', () => {
+ it('should import all nodes from global registry', async () => {
+ const nodesModule = await import('@/nodes')
+
+ // Check registry functions
+ expect(nodesModule.NODE_REGISTRY).toBeDefined()
+ expect(nodesModule.registerNode).toBeDefined()
+ expect(nodesModule.getNodeDefinition).toBeDefined()
+ expect(nodesModule.getAllNodeDefinitions).toBeDefined()
+ expect(nodesModule.getNodesByType).toBeDefined()
+ expect(nodesModule.isNodeRegistered).toBeDefined()
+
+ // Check that all node definitions are exported
+ expect(nodesModule.EMAIL_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.HTTP_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.SCHEDULE_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.WEBHOOK_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.MANUAL_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.IF_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.FILTER_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.DATABASE_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.TRANSFORM_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.DELAY_NODE_DEFINITION).toBeDefined()
+
+ // Check that all execution functions are exported
+ expect(nodesModule.executeEmailNode).toBeDefined()
+ expect(nodesModule.executeHttpNode).toBeDefined()
+ expect(nodesModule.executeManualNode).toBeDefined()
+ expect(nodesModule.executeIfNode).toBeDefined()
+ expect(nodesModule.executeFilterNode).toBeDefined()
+ expect(nodesModule.executeDatabaseNode).toBeDefined()
+ expect(nodesModule.executeTransformNode).toBeDefined()
+ expect(nodesModule.executeDelayNode).toBeDefined()
+
+ // Check that React components are exported
+ expect(nodesModule.EmailNode).toBeDefined()
+ expect(nodesModule.HttpNode).toBeDefined()
+ expect(nodesModule.ScheduleNode).toBeDefined()
+ expect(nodesModule.WebhookNode).toBeDefined()
+ expect(nodesModule.ManualNode).toBeDefined()
+ expect(nodesModule.IfNode).toBeDefined()
+ expect(nodesModule.FilterNode).toBeDefined()
+ expect(nodesModule.DatabaseNode).toBeDefined()
+ expect(nodesModule.TransformNode).toBeDefined()
+ expect(nodesModule.DelayNode).toBeDefined()
+ })
+ })
+
+ describe('Legacy Compatibility Import', () => {
+ it('should import legacy compatibility functions', async () => {
+ const legacyModule = await import('@/lib/node-definitions')
+
+ expect(legacyModule.findNodeDefinition).toBeDefined()
+ expect(legacyModule.getDefaultConfigForNode).toBeDefined()
+ expect(legacyModule.validateNodeBeforeExecute).toBeDefined()
+
+ expect(typeof legacyModule.findNodeDefinition).toBe('function')
+ expect(typeof legacyModule.getDefaultConfigForNode).toBe('function')
+ expect(typeof legacyModule.validateNodeBeforeExecute).toBe('function')
+ })
+ })
+
+ describe('Type Consistency', () => {
+ it('should have all required exports from nodes', async () => {
+ const nodesModule = await import('@/nodes')
+
+ // Check that all node execution functions are available
+ expect(typeof nodesModule.executeEmailNode).toBe('function')
+ expect(typeof nodesModule.executeHttpNode).toBe('function')
+ expect(typeof nodesModule.executeManualNode).toBe('function')
+ expect(typeof nodesModule.executeIfNode).toBe('function')
+ expect(typeof nodesModule.executeFilterNode).toBe('function')
+ expect(typeof nodesModule.executeDatabaseNode).toBe('function')
+ expect(typeof nodesModule.executeTransformNode).toBe('function')
+ expect(typeof nodesModule.executeDelayNode).toBe('function')
+
+ // Check that all node definitions are available
+ expect(nodesModule.EMAIL_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.HTTP_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.SCHEDULE_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.WEBHOOK_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.MANUAL_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.IF_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.FILTER_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.DATABASE_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.TRANSFORM_NODE_DEFINITION).toBeDefined()
+ expect(nodesModule.DELAY_NODE_DEFINITION).toBeDefined()
+ })
+ })
+})
diff --git a/tests/integration/node-registry.test.ts b/tests/integration/node-registry.test.ts
new file mode 100644
index 0000000..f51ffc2
--- /dev/null
+++ b/tests/integration/node-registry.test.ts
@@ -0,0 +1,216 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest'
+import {
+ NODE_REGISTRY,
+ getNodeDefinition,
+ getAllNodeDefinitions,
+ getNodesByType,
+ isNodeRegistered,
+ clearRegistry,
+ registerNode,
+ NodeDefinition
+} from '@/nodes'
+import { NodeType, ActionType, TriggerType, LogicType } from '@/types/workflow'
+
+describe('Node Registry Integration', () => {
+ describe('Registry Population', () => {
+ it('should have all expected nodes registered', () => {
+ const allNodes = getAllNodeDefinitions()
+
+ // Should have all 10 node types
+ expect(allNodes).toHaveLength(10)
+
+ // Check that we have the right number of each type
+ const triggerNodes = getNodesByType(NodeType.TRIGGER)
+ const actionNodes = getNodesByType(NodeType.ACTION)
+ const logicNodes = getNodesByType(NodeType.LOGIC)
+
+ expect(triggerNodes).toHaveLength(3) // MANUAL, SCHEDULE, WEBHOOK
+ expect(actionNodes).toHaveLength(5) // EMAIL, HTTP, DATABASE, TRANSFORM, DELAY
+ expect(logicNodes).toHaveLength(2) // IF, FILTER
+ })
+
+ it('should have all trigger nodes registered', () => {
+ expect(isNodeRegistered(NodeType.TRIGGER, TriggerType.MANUAL)).toBe(true)
+ expect(isNodeRegistered(NodeType.TRIGGER, TriggerType.SCHEDULE)).toBe(true)
+ expect(isNodeRegistered(NodeType.TRIGGER, TriggerType.WEBHOOK)).toBe(true)
+
+ expect(getNodeDefinition(NodeType.TRIGGER, TriggerType.MANUAL)).toBeDefined()
+ expect(getNodeDefinition(NodeType.TRIGGER, TriggerType.SCHEDULE)).toBeDefined()
+ expect(getNodeDefinition(NodeType.TRIGGER, TriggerType.WEBHOOK)).toBeDefined()
+ })
+
+ it('should have all action nodes registered', () => {
+ expect(isNodeRegistered(NodeType.ACTION, ActionType.EMAIL)).toBe(true)
+ expect(isNodeRegistered(NodeType.ACTION, ActionType.HTTP)).toBe(true)
+ expect(isNodeRegistered(NodeType.ACTION, ActionType.DATABASE)).toBe(true)
+ expect(isNodeRegistered(NodeType.ACTION, ActionType.TRANSFORM)).toBe(true)
+ expect(isNodeRegistered(NodeType.ACTION, ActionType.DELAY)).toBe(true)
+
+ expect(getNodeDefinition(NodeType.ACTION, ActionType.EMAIL)).toBeDefined()
+ expect(getNodeDefinition(NodeType.ACTION, ActionType.HTTP)).toBeDefined()
+ expect(getNodeDefinition(NodeType.ACTION, ActionType.DATABASE)).toBeDefined()
+ expect(getNodeDefinition(NodeType.ACTION, ActionType.TRANSFORM)).toBeDefined()
+ expect(getNodeDefinition(NodeType.ACTION, ActionType.DELAY)).toBeDefined()
+ })
+
+ it('should have all logic nodes registered', () => {
+ expect(isNodeRegistered(NodeType.LOGIC, LogicType.IF)).toBe(true)
+ expect(isNodeRegistered(NodeType.LOGIC, LogicType.FILTER)).toBe(true)
+
+ expect(getNodeDefinition(NodeType.LOGIC, LogicType.IF)).toBeDefined()
+ expect(getNodeDefinition(NodeType.LOGIC, LogicType.FILTER)).toBeDefined()
+ })
+ })
+
+ describe('Node Definition Validation', () => {
+ it('should have valid node definitions for all nodes', () => {
+ const allNodes = getAllNodeDefinitions()
+
+ allNodes.forEach(node => {
+ // Check required properties
+ expect(node.nodeType).toBeDefined()
+ expect(node.subType).toBeDefined()
+ expect(node.label).toBeDefined()
+ expect(node.description).toBeDefined()
+ expect(node.parameters).toBeDefined()
+ expect(Array.isArray(node.parameters)).toBe(true)
+ expect(typeof node.validate).toBe('function')
+ expect(typeof node.getDefaults).toBe('function')
+
+ // Test that validate function works
+ const defaults = node.getDefaults()
+ expect(defaults).toBeDefined()
+
+ // Validation should pass for default config
+ const errors = node.validate(defaults)
+ expect(Array.isArray(errors)).toBe(true)
+ })
+ })
+
+ it('should have unique node type/subtype combinations', () => {
+ const allNodes = getAllNodeDefinitions()
+ const combinations = new Set()
+
+ allNodes.forEach(node => {
+ const combo = `${node.nodeType}-${node.subType}`
+ expect(combinations.has(combo)).toBe(false)
+ combinations.add(combo)
+ })
+ })
+
+ it('should provide valid defaults for all nodes', () => {
+ const allNodes = getAllNodeDefinitions()
+
+ allNodes.forEach(node => {
+ const defaults = node.getDefaults()
+ expect(defaults).toBeDefined()
+ expect(typeof defaults).toBe('object')
+
+ // Note: Some nodes may have empty defaults that require user configuration
+ // This is expected behavior for nodes like Email that require sensitive credentials
+ const errors = node.validate(defaults)
+ expect(Array.isArray(errors)).toBe(true)
+
+ // For nodes that do provide complete defaults, they should validate
+ // Some nodes require user-provided configuration
+ const nodesRequiringConfig = [
+ ActionType.EMAIL,
+ ActionType.HTTP,
+ ActionType.DATABASE,
+ ActionType.TRANSFORM,
+ LogicType.IF,
+ LogicType.FILTER
+ ]
+ if (!nodesRequiringConfig.includes(node.subType as ActionType | LogicType)) { expect(errors).toEqual([])
+ }
+ })
+ })
+ })
+
+ describe('Registry Operations', () => {
+ let originalRegistry: Map
+
+ beforeEach(() => {
+ // Save original registry
+ originalRegistry = new Map(NODE_REGISTRY)
+ })
+
+ afterEach(() => {
+ // Restore original registry
+ clearRegistry()
+ originalRegistry.forEach((value, key) => {
+ NODE_REGISTRY.set(key, value)
+ })
+ })
+
+ it('should allow registering and unregistering nodes', () => {
+ // Clear registry for clean test
+ clearRegistry()
+ expect(getAllNodeDefinitions()).toHaveLength(0)
+
+ // Register a mock node
+ const mockNode = {
+ nodeType: NodeType.ACTION,
+ subType: 'test',
+ label: 'Test Node',
+ description: 'Test Description',
+ parameters: [],
+ validate: () => [],
+ getDefaults: () => ({})
+ }
+
+ registerNode(mockNode)
+ expect(getAllNodeDefinitions()).toHaveLength(1)
+ expect(isNodeRegistered(NodeType.ACTION, 'test')).toBe(true)
+ expect(getNodeDefinition(NodeType.ACTION, 'test')).toBe(mockNode)
+ })
+
+ it('should filter nodes by type correctly', () => {
+ const actionNodes = getNodesByType(NodeType.ACTION)
+ const triggerNodes = getNodesByType(NodeType.TRIGGER)
+ const logicNodes = getNodesByType(NodeType.LOGIC)
+
+ actionNodes.forEach(node => {
+ expect(node.nodeType).toBe(NodeType.ACTION)
+ })
+
+ triggerNodes.forEach(node => {
+ expect(node.nodeType).toBe(NodeType.TRIGGER)
+ })
+
+ logicNodes.forEach(node => {
+ expect(node.nodeType).toBe(NodeType.LOGIC)
+ })
+ })
+ })
+
+ describe('Node Type Coverage', () => {
+ it('should cover all ActionType enum values', () => {
+ const registeredActions = getNodesByType(NodeType.ACTION).map(n => n.subType)
+
+ // Check that all important action types are covered
+ expect(registeredActions).toContain(ActionType.EMAIL)
+ expect(registeredActions).toContain(ActionType.HTTP)
+ expect(registeredActions).toContain(ActionType.DATABASE)
+ expect(registeredActions).toContain(ActionType.TRANSFORM)
+ expect(registeredActions).toContain(ActionType.DELAY)
+ })
+
+ it('should cover all TriggerType enum values', () => {
+ const registeredTriggers = getNodesByType(NodeType.TRIGGER).map(n => n.subType)
+
+ // Check that all important trigger types are covered
+ expect(registeredTriggers).toContain(TriggerType.MANUAL)
+ expect(registeredTriggers).toContain(TriggerType.SCHEDULE)
+ expect(registeredTriggers).toContain(TriggerType.WEBHOOK)
+ })
+
+ it('should cover all LogicType enum values', () => {
+ const registeredLogic = getNodesByType(NodeType.LOGIC).map(n => n.subType)
+
+ // Check that all important logic types are covered
+ expect(registeredLogic).toContain(LogicType.IF)
+ expect(registeredLogic).toContain(LogicType.FILTER)
+ })
+ })
+})
diff --git a/tests/integration/workflow-execution.test.ts b/tests/integration/workflow-execution.test.ts
new file mode 100644
index 0000000..3eb9bb7
--- /dev/null
+++ b/tests/integration/workflow-execution.test.ts
@@ -0,0 +1,486 @@
+import { describe, it, expect, beforeEach } from 'vitest'
+import { WorkflowExecutor } from '@/server/services/workflow-executor'
+import {
+ Workflow,
+ WorkflowNode,
+ WorkflowEdge,
+ NodeType,
+ TriggerType,
+ ActionType,
+ LogicType
+} from '@/types/workflow'
+import { HttpExecutionResult } from '@/nodes/HttpNode/HttpNode.types'
+import { DatabaseExecutionResult } from '@/nodes/DatabaseNode/DatabaseNode.types'
+import { TransformExecutionResult } from '@/nodes/TransformNode/TransformNode.types'
+import { DelayExecutionResult } from '@/nodes/DelayNode/DelayNode.types'
+import { IfExecutionResult } from '@/nodes/IfNode/IfNode.types'
+import { v4 as uuidv4 } from 'uuid'
+
+describe('Workflow Execution Integration', () => {
+ let mockWorkflow: Workflow
+
+ beforeEach(() => {
+ mockWorkflow = {
+ id: 'test-workflow',
+ name: 'Test Workflow',
+ description: 'Integration test workflow',
+ nodes: [],
+ edges: [],
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ isActive: true
+ }
+ })
+
+ describe('Single Node Execution', () => {
+ it('should execute a manual trigger node', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'trigger',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Manual Trigger',
+ nodeType: NodeType.TRIGGER,
+ triggerType: TriggerType.MANUAL,
+ config: {}
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute()
+
+ expect(result.status).toBe('completed')
+ expect(result.logs.length).toBeGreaterThan(0)
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+ })
+
+ it('should execute an HTTP action node', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'HTTP Request',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.HTTP,
+ config: {
+ method: 'GET',
+ url: 'https://httpbin.org/get',
+ authentication: { type: 'none' }
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ // HTTP node execution may succeed or fail depending on network
+ expect(['completed', 'failed']).toContain(result.status)
+ expect(result.completedAt).toBeDefined()
+
+ if (result.status === 'completed') {
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+ const httpResult = result.nodeOutputs[nodeId] as HttpExecutionResult
+ expect(httpResult.url).toBe('https://httpbin.org/get')
+ }
+ })
+
+ it('should execute a database action node (placeholder)', async () => {
+ // Skip test in CI if TEST_DB_URL is not set
+ if (!process.env.TEST_DB_URL && process.env.CI === 'true') {
+ return // Skip test in CI environment without TEST_DB_URL
+ }
+
+ // This test uses mocked database operations - no live database required
+ const testConnectionString = process.env.TEST_DB_URL || 'postgresql://mock:mock@localhost:5432/mockdb'
+
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Database Query',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.DATABASE,
+ config: {
+ operation: 'select',
+ connectionString: testConnectionString,
+ query: 'SELECT * FROM users'
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ // Note: DatabaseNode service uses mock implementation, no actual DB connection is made
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+
+ const dbResult = result.nodeOutputs[nodeId] as DatabaseExecutionResult
+ expect(dbResult.operation).toBe('select')
+ expect(dbResult.rows).toBeDefined()
+ // Verify we get mock data (indicating successful mock execution)
+ expect(Array.isArray(dbResult.rows)).toBe(true)
+ })
+
+ it('should execute a transform action node (placeholder)', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Data Transform',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.TRANSFORM,
+ config: {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return { ...item, processed: true }'
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+
+ const transformResult = result.nodeOutputs[nodeId] as TransformExecutionResult
+ expect(transformResult.operation).toBe('map')
+ expect(transformResult.transformedData).toBeDefined()
+ })
+
+ it('should execute a delay action node (placeholder)', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Delay',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.DELAY,
+ config: {
+ delayType: 'fixed',
+ value: 0.01, // 10ms for fast test
+ unit: 'seconds',
+ passthrough: true
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+
+ const delayResult = result.nodeOutputs[nodeId] as DelayExecutionResult
+ expect(delayResult.delayType).toBe('fixed')
+ expect(delayResult.actualDelayMs).toBeGreaterThan(0)
+ })
+
+ it('should execute an IF logic node', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'logic',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'IF Condition',
+ nodeType: NodeType.LOGIC,
+ logicType: LogicType.IF,
+ config: {
+ condition: {
+ field: 'value',
+ operator: 'equals',
+ value: 'test'
+ }
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[nodeId]).toBeDefined()
+
+ const ifResult = result.nodeOutputs[nodeId] as IfExecutionResult
+ expect(ifResult.conditionMet).toBeDefined()
+ expect(typeof ifResult.conditionMet).toBe('boolean')
+ })
+
+ it('should execute a FILTER logic node', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'logic',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Filter Items',
+ nodeType: NodeType.LOGIC,
+ logicType: LogicType.FILTER,
+ config: {
+ condition: {
+ field: 'active',
+ operator: 'equals',
+ value: true
+ }
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ // Filter node should execute (completed or failed both acceptable for integration test)
+ expect(['completed', 'failed']).toContain(result.status)
+ expect(result.completedAt).toBeDefined()
+ })
+ })
+
+ describe('Multi-Node Workflow Execution', () => {
+ it('should execute a simple linear workflow', async () => {
+ const triggerId = uuidv4()
+ const actionId = uuidv4()
+
+ const triggerNode: WorkflowNode = {
+ id: triggerId,
+ type: 'trigger',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Manual Trigger',
+ nodeType: NodeType.TRIGGER,
+ triggerType: TriggerType.MANUAL,
+ config: {}
+ }
+ }
+
+ const actionNode: WorkflowNode = {
+ id: actionId,
+ type: 'action',
+ position: { x: 0, y: 100 },
+ data: {
+ label: 'Transform Data',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.TRANSFORM,
+ config: {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return { ...item, processed: true }'
+ }
+ }
+ }
+
+ const edge: WorkflowEdge = {
+ id: uuidv4(),
+ source: triggerId,
+ target: actionId
+ }
+
+ mockWorkflow.nodes = [triggerNode, actionNode]
+ mockWorkflow.edges = [edge]
+
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute()
+
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[triggerId]).toBeDefined()
+ expect(result.nodeOutputs[actionId]).toBeDefined()
+
+ // Should have logs for both nodes
+ const triggerLogs = result.logs.filter(log => log.nodeId === triggerId)
+ const actionLogs = result.logs.filter(log => log.nodeId === actionId)
+ expect(triggerLogs.length).toBeGreaterThan(0)
+ expect(actionLogs.length).toBeGreaterThan(0)
+ })
+
+ it('should execute a workflow with conditional branching', async () => {
+ const triggerId = uuidv4()
+ const ifNodeId = uuidv4()
+ const trueActionId = uuidv4()
+ const falseActionId = uuidv4()
+
+ const triggerNode: WorkflowNode = {
+ id: triggerId,
+ type: 'trigger',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Manual Trigger',
+ nodeType: NodeType.TRIGGER,
+ triggerType: TriggerType.MANUAL,
+ config: {}
+ }
+ }
+
+ const ifNode: WorkflowNode = {
+ id: ifNodeId,
+ type: 'logic',
+ position: { x: 0, y: 100 },
+ data: {
+ label: 'IF Condition',
+ nodeType: NodeType.LOGIC,
+ logicType: LogicType.IF,
+ config: {
+ condition: {
+ field: 'shouldProcess',
+ operator: 'equals',
+ value: true
+ }
+ }
+ }
+ }
+
+ const trueActionNode: WorkflowNode = {
+ id: trueActionId,
+ type: 'action',
+ position: { x: -100, y: 200 },
+ data: {
+ label: 'Process Data',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.TRANSFORM,
+ config: {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return { ...item, processed: true }'
+ }
+ }
+ }
+
+ const falseActionNode: WorkflowNode = {
+ id: falseActionId,
+ type: 'action',
+ position: { x: 100, y: 200 },
+ data: {
+ label: 'Skip Processing',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.DELAY,
+ config: {
+ delayType: 'fixed',
+ value: 0.001,
+ unit: 'seconds',
+ passthrough: true
+ }
+ }
+ }
+
+ const edges: WorkflowEdge[] = [
+ { id: uuidv4(), source: triggerId, target: ifNodeId },
+ { id: uuidv4(), source: ifNodeId, target: trueActionId, sourceHandle: 'true' },
+ { id: uuidv4(), source: ifNodeId, target: falseActionId, sourceHandle: 'false' }
+ ]
+
+ mockWorkflow.nodes = [triggerNode, ifNode, trueActionNode, falseActionNode]
+ mockWorkflow.edges = edges
+
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute()
+
+ expect(result.status).toBe('completed')
+ expect(result.nodeOutputs[triggerId]).toBeDefined()
+ expect(result.nodeOutputs[ifNodeId]).toBeDefined()
+
+ // One of the action nodes should have executed based on the condition
+ const trueActionExecuted = result.nodeOutputs[trueActionId] !== undefined
+ const falseActionExecuted = result.nodeOutputs[falseActionId] !== undefined
+
+ // At least one should have executed (depending on condition evaluation)
+ expect(trueActionExecuted || falseActionExecuted).toBe(true)
+ })
+ })
+
+ describe('Error Handling', () => {
+ it('should handle workflow with no trigger nodes', async () => {
+ const actionNode: WorkflowNode = {
+ id: uuidv4(),
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Orphaned Action',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.TRANSFORM,
+ config: {
+ operation: 'map',
+ language: 'javascript',
+ script: 'return item'
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [actionNode]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute()
+
+ expect(result.status).toBe('failed')
+ expect(result.error).toContain('No trigger nodes found')
+ })
+
+ it('should handle invalid node configuration', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'action',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Invalid HTTP',
+ nodeType: NodeType.ACTION,
+ actionType: ActionType.HTTP,
+ config: {
+ // Missing required URL field
+ method: 'GET'
+ }
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+ const result = await executor.execute({ startNodeId: nodeId })
+
+ expect(result.status).toBe('failed')
+ expect(result.error).toBeDefined()
+ })
+ })
+
+ describe('Workflow Cancellation', () => {
+ it('should handle workflow cancellation', async () => {
+ const nodeId = uuidv4()
+ const node: WorkflowNode = {
+ id: nodeId,
+ type: 'trigger',
+ position: { x: 0, y: 0 },
+ data: {
+ label: 'Manual Trigger', // Use manual trigger for simpler cancellation test
+ nodeType: NodeType.TRIGGER,
+ triggerType: TriggerType.MANUAL,
+ config: {}
+ }
+ }
+
+ mockWorkflow.nodes = [node]
+ const executor = new WorkflowExecutor(mockWorkflow)
+
+ // Test the stop functionality
+ const executionPromise = executor.execute()
+ executor.stop() // Immediately stop
+
+ const result = await executionPromise
+
+ expect(['cancelled', 'completed']).toContain(result.status)
+ expect(result.completedAt).toBeDefined()
+ })
+ })
+})
diff --git a/tests/workflow-id-validation.test.ts b/tests/workflow-id-validation.test.ts
new file mode 100644
index 0000000..fb7e516
--- /dev/null
+++ b/tests/workflow-id-validation.test.ts
@@ -0,0 +1,117 @@
+import { describe, it, expect } from 'vitest'
+import { isValidWorkflowId } from '@/lib/workflow-id-validation'
+
+/**
+ * Test the workflowId validation regex pattern
+ * This ensures our browser-compatible regex maintains the same validation rules
+ */
+
+// Use the production validator to ensure we test real behavior
+
+describe('WorkflowId Validation', () => {
+ describe('Valid inputs', () => {
+ it('should accept identifiers 3–64 chars long (alphanumeric, "-" or "_")', () => {
+ expect(isValidWorkflowId('abc')).toBe(true)
+ expect(isValidWorkflowId('ABC')).toBe(true)
+ expect(isValidWorkflowId('123')).toBe(true)
+ expect(isValidWorkflowId('a1b')).toBe(true)
+ expect(isValidWorkflowId('A1b')).toBe(true)
+ expect(isValidWorkflowId('my_id-123')).toBe(true)
+ expect(isValidWorkflowId('a'.repeat(64))).toBe(true)
+ })
+
+ it('should accept alphanumeric strings', () => {
+ expect(isValidWorkflowId('workflow')).toBe(true)
+ expect(isValidWorkflowId('myWorkflow123')).toBe(true)
+ expect(isValidWorkflowId('test123')).toBe(true)
+ })
+
+ it('should accept strings with single underscores or hyphens', () => {
+ expect(isValidWorkflowId('my_workflow')).toBe(true)
+ expect(isValidWorkflowId('my-workflow')).toBe(true)
+ expect(isValidWorkflowId('work_flow_123')).toBe(true)
+ expect(isValidWorkflowId('work-flow-123')).toBe(true)
+ })
+
+ it('should accept mixed valid characters', () => {
+ expect(isValidWorkflowId('my_work-flow123')).toBe(true)
+ expect(isValidWorkflowId('test_123-abc')).toBe(true)
+ })
+ })
+
+ describe('Invalid inputs', () => {
+ it('should reject empty or whitespace-only strings', () => {
+ expect(isValidWorkflowId('')).toBe(false)
+ expect(isValidWorkflowId(' ')).toBe(false)
+ expect(isValidWorkflowId('\t')).toBe(false)
+ })
+
+ it('should reject strings that are too short (< 3 characters)', () => {
+ expect(isValidWorkflowId('a')).toBe(false)
+ expect(isValidWorkflowId('12')).toBe(false)
+ expect(isValidWorkflowId('ab')).toBe(false)
+ })
+
+ it('should reject strings that are too long (> 64 characters)', () => {
+ const longString = 'a'.repeat(65) // 65 characters
+ expect(isValidWorkflowId(longString)).toBe(false)
+ })
+
+ it('should reject strings starting with underscore or hyphen', () => {
+ expect(isValidWorkflowId('_workflow')).toBe(false)
+ expect(isValidWorkflowId('-workflow')).toBe(false)
+ expect(isValidWorkflowId('_test123')).toBe(false)
+ expect(isValidWorkflowId('-test123')).toBe(false)
+ })
+
+ it('should reject strings ending with underscore or hyphen', () => {
+ expect(isValidWorkflowId('workflow_')).toBe(false)
+ expect(isValidWorkflowId('workflow-')).toBe(false)
+ expect(isValidWorkflowId('test123_')).toBe(false)
+ expect(isValidWorkflowId('test123-')).toBe(false)
+ })
+
+ it('should reject consecutive underscores or hyphens', () => {
+ expect(isValidWorkflowId('work__flow')).toBe(false)
+ expect(isValidWorkflowId('work--flow')).toBe(false)
+ expect(isValidWorkflowId('work_-flow')).toBe(false)
+ expect(isValidWorkflowId('work-_flow')).toBe(false)
+ expect(isValidWorkflowId('work___flow')).toBe(false)
+ expect(isValidWorkflowId('work---flow')).toBe(false)
+ })
+
+ it('should reject invalid characters', () => {
+ expect(isValidWorkflowId('work@flow')).toBe(false)
+ expect(isValidWorkflowId('work.flow')).toBe(false)
+ expect(isValidWorkflowId('work flow')).toBe(false)
+ expect(isValidWorkflowId('work+flow')).toBe(false)
+ expect(isValidWorkflowId('work/flow')).toBe(false)
+ expect(isValidWorkflowId('work\\flow')).toBe(false)
+ })
+
+ it('should reject reserved names (case-insensitive)', () => {
+ expect(isValidWorkflowId('api')).toBe(false)
+ expect(isValidWorkflowId('API')).toBe(false)
+ expect(isValidWorkflowId('App')).toBe(false)
+ expect(isValidWorkflowId('admin')).toBe(false)
+ expect(isValidWorkflowId('test')).toBe(false)
+ expect(isValidWorkflowId('TEST')).toBe(false)
+ expect(isValidWorkflowId('null')).toBe(false)
+ expect(isValidWorkflowId('undefined')).toBe(false)
+ })
+ })
+
+ describe('Edge cases', () => {
+ it('should handle trimming correctly', () => {
+ expect(isValidWorkflowId(' valid ')).toBe(true)
+ expect(isValidWorkflowId(' valid123 ')).toBe(true)
+ expect(isValidWorkflowId(' _invalid ')).toBe(false)
+ })
+
+ it('should handle mixed case correctly', () => {
+ expect(isValidWorkflowId('MyWorkFlow123')).toBe(true)
+ expect(isValidWorkflowId('MY_WORK_FLOW')).toBe(true)
+ expect(isValidWorkflowId('my-Work-Flow')).toBe(true)
+ })
+ })
+})
diff --git a/types/credentials.ts b/types/credentials.ts
new file mode 100644
index 0000000..f9675fd
--- /dev/null
+++ b/types/credentials.ts
@@ -0,0 +1,29 @@
+/**
+ * Credential type constants and type definitions
+ * Single source of truth for all credential-related types
+ */
+
+// Define the credential types as a const assertion for literal types
+export const CREDENTIAL_TYPES = ['database', 'api', 'email', 'generic'] as const
+
+// Extract the union type from the const array
+export type CredentialType = typeof CREDENTIAL_TYPES[number]
+
+/**
+ * Type guard to check if a value is a valid credential type
+ * @param value - The value to check
+ * @returns True if the value is a valid credential type
+ */
+export function isValidCredentialType(value: unknown): value is CredentialType {
+ return typeof value === 'string' && (CREDENTIAL_TYPES as readonly string[]).includes(value)
+}
+
+/**
+ * Safely convert a string to a credential type with fallback
+ * @param value - The value to convert
+ * @param fallback - The fallback value if conversion fails (defaults to 'generic')
+ * @returns A valid credential type
+ */
+export function toCredentialType(value: unknown, fallback: CredentialType = 'generic'): CredentialType {
+ return isValidCredentialType(value) ? value : fallback
+}