if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
- workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
- id: v.id,
- name: v.name,
- type: v.type,
- value: v.value,
- }))
+ workflowVariables = variablesData?.data
}
workflowsToExport.push({
@@ -101,15 +98,13 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
}
}
- const foldersToExport: Array<{
- id: string
- name: string
- parentId: string | null
- }> = (foldersData.folders || []).map((folder: any) => ({
- id: folder.id,
- name: folder.name,
- parentId: folder.parentId,
- }))
+ const foldersToExport: FolderExportData[] = (foldersData.folders || []).map(
+ (folder: FolderExportData) => ({
+ id: folder.id,
+ name: folder.name,
+ parentId: folder.parentId,
+ })
+ )
const zipBlob = await exportWorkspaceToZip(
workspaceName,
diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts
index 00c46a00a3..d4f294e7f1 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts
+++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts
@@ -79,21 +79,36 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
body: JSON.stringify(workflowData),
})
- // Save variables if any
- if (workflowData.variables && workflowData.variables.length > 0) {
- const variablesPayload = workflowData.variables.map((v: any) => ({
- id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
- workflowId: newWorkflowId,
- name: v.name,
- type: v.type,
- value: v.value,
- }))
-
- await fetch(`/api/workflows/${newWorkflowId}/variables`, {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({ variables: variablesPayload }),
- })
+ // Save variables if any (handle both legacy Array and current Record formats)
+ if (workflowData.variables) {
+ // Convert to Record format for API (handles backwards compatibility with old Array exports)
+ const variablesArray = Array.isArray(workflowData.variables)
+ ? workflowData.variables
+ : Object.values(workflowData.variables)
+
+ if (variablesArray.length > 0) {
+ const variablesRecord: Record<
+ string,
+ { id: string; workflowId: string; name: string; type: string; value: unknown }
+ > = {}
+
+ for (const v of variablesArray) {
+ const id = typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID()
+ variablesRecord[id] = {
+ id,
+ workflowId: newWorkflowId,
+ name: v.name,
+ type: v.type,
+ value: v.value,
+ }
+ }
+
+ await fetch(`/api/workflows/${newWorkflowId}/variables`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ variables: variablesRecord }),
+ })
+ }
}
logger.info(`Imported workflow: ${workflowName}`)
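
A standalone sketch of the normalization both import hooks now perform, assuming a `WorkflowVariable` shape with `id`, `name`, `type`, and `value` (the interface name and the `normalizeVariables` helper are illustrative, not the project's exports):

```ts
interface WorkflowVariable {
  id: string
  workflowId: string
  name: string
  type: string
  value: unknown
}

// Accepts either the legacy Array export format or the current Record format
// and returns a Record keyed by variable id, minting ids where missing/blank.
function normalizeVariables(
  input: WorkflowVariable[] | Record<string, WorkflowVariable>,
  workflowId: string
): Record<string, WorkflowVariable> {
  const list = Array.isArray(input) ? input : Object.values(input)
  const record: Record<string, WorkflowVariable> = {}
  for (const v of list) {
    const id = typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID()
    record[id] = { id, workflowId, name: v.name, type: v.type, value: v.value }
  }
  return record
}
```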
diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts
index b71487734b..1ad051307b 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts
+++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts
@@ -159,21 +159,36 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}
- // Save variables if any
- if (workflowData.variables && workflowData.variables.length > 0) {
- const variablesPayload = workflowData.variables.map((v: any) => ({
- id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
- workflowId: newWorkflow.id,
- name: v.name,
- type: v.type,
- value: v.value,
- }))
-
- await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({ variables: variablesPayload }),
- })
+ // Save variables if any (handle both legacy Array and current Record formats)
+ if (workflowData.variables) {
+ // Convert to Record format for API (handles backwards compatibility with old Array exports)
+ const variablesArray = Array.isArray(workflowData.variables)
+ ? workflowData.variables
+ : Object.values(workflowData.variables)
+
+ if (variablesArray.length > 0) {
+ const variablesRecord: Record<
+ string,
+ { id: string; workflowId: string; name: string; type: string; value: unknown }
+ > = {}
+
+ for (const v of variablesArray) {
+ const id = typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID()
+ variablesRecord[id] = {
+ id,
+ workflowId: newWorkflow.id,
+ name: v.name,
+ type: v.type,
+ value: v.value,
+ }
+ }
+
+ await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ variables: variablesRecord }),
+ })
+ }
}
logger.info(`Imported workflow: ${workflowName}`)
diff --git a/apps/sim/components/emails/components/email-footer.tsx b/apps/sim/components/emails/components/email-footer.tsx
index 76ef355ee3..1e6f7bf424 100644
--- a/apps/sim/components/emails/components/email-footer.tsx
+++ b/apps/sim/components/emails/components/email-footer.tsx
@@ -112,7 +112,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
|
{brand.name}
- {isHosted && <>, 80 Langton St, San Francisco, CA 94133, USA</>}
+ {isHosted && <>, 80 Langton St, San Francisco, CA 94103, USA</>}
|
diff --git a/apps/sim/executor/__test-utils__/executor-mocks.ts b/apps/sim/executor/__test-utils__/executor-mocks.ts
index 052a861988..efe146ac56 100644
--- a/apps/sim/executor/__test-utils__/executor-mocks.ts
+++ b/apps/sim/executor/__test-utils__/executor-mocks.ts
@@ -427,9 +427,7 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
input: 'json',
},
outputs: {
- response: {
- input: 'json',
- },
+ response: { type: 'json', description: 'Input response' },
},
enabled: true,
metadata: { id: 'starter', name: 'Starter Block' },
@@ -444,11 +442,9 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
headers: 'json',
},
outputs: {
- response: {
- data: 'json',
- status: 'number',
- headers: 'json',
- },
+ data: { type: 'json', description: 'Response data' },
+ status: { type: 'number', description: 'Response status' },
+ headers: { type: 'json', description: 'Response headers' },
},
enabled: true,
metadata: { id: 'response', name: 'Response Block' },
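
For reference, a sketch of the shape change the mock now reflects: output fields move from bare type strings nested under `response` to flat entries of `{ type, description }` (the `OutputDefinition` name is illustrative, not the repo's type):

```ts
// Illustrative shape only; the real project type may differ.
interface OutputDefinition {
  type: 'json' | 'number' | 'string'
  description: string
}

// Before: a single `response` key mapping field names to bare type strings.
const before = { response: { data: 'json', status: 'number', headers: 'json' } }

// After: one entry per output field, each carrying a type plus a description.
const after: Record<string, OutputDefinition> = {
  data: { type: 'json', description: 'Response data' },
  status: { type: 'number', description: 'Response status' },
  headers: { type: 'json', description: 'Response headers' },
}
```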
diff --git a/apps/sim/executor/constants.ts b/apps/sim/executor/constants.ts
index f483bbfc78..7a5d06f405 100644
--- a/apps/sim/executor/constants.ts
+++ b/apps/sim/executor/constants.ts
@@ -1,3 +1,5 @@
+import type { LoopType, ParallelType } from '@/lib/workflows/types'
+
export enum BlockType {
PARALLEL = 'parallel',
LOOP = 'loop',
@@ -40,12 +42,8 @@ export const METADATA_ONLY_BLOCK_TYPES = [
BlockType.NOTE,
] as const
-export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile'
-
export type SentinelType = 'start' | 'end'
-export type ParallelType = 'collection' | 'count'
-
export const EDGE = {
CONDITION_PREFIX: 'condition-',
CONDITION_TRUE: 'condition-true',
diff --git a/apps/sim/executor/handlers/router/router-handler.ts b/apps/sim/executor/handlers/router/router-handler.ts
index b00cc0f6ea..d702a1b80f 100644
--- a/apps/sim/executor/handlers/router/router-handler.ts
+++ b/apps/sim/executor/handlers/router/router-handler.ts
@@ -366,12 +366,12 @@ export class RouterBlockHandler implements BlockHandler {
let systemPrompt = ''
if (isAgentBlockType(targetBlock.metadata?.id)) {
+ const paramsPrompt = targetBlock.config?.params?.systemPrompt
+ const inputsPrompt = targetBlock.inputs?.systemPrompt
systemPrompt =
- targetBlock.config?.params?.systemPrompt || targetBlock.inputs?.systemPrompt || ''
-
- if (!systemPrompt && targetBlock.inputs) {
- systemPrompt = targetBlock.inputs.systemPrompt || ''
- }
+ (typeof paramsPrompt === 'string' ? paramsPrompt : '') ||
+ (typeof inputsPrompt === 'string' ? inputsPrompt : '') ||
+ ''
}
return {
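
The effect of the rewrite is "first non-empty string wins, anything non-string is ignored". An equivalent helper-based sketch (the `firstNonEmptyString` helper is hypothetical, not in the codebase):

```ts
// Returns the first candidate that is a non-empty string; '' if none qualify.
function firstNonEmptyString(...candidates: unknown[]): string {
  for (const c of candidates) {
    if (typeof c === 'string' && c.length > 0) return c
  }
  return ''
}

const systemPrompt = firstNonEmptyString(
  targetBlock.config?.params?.systemPrompt,
  targetBlock.inputs?.systemPrompt
)
```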
diff --git a/apps/sim/hooks/queries/subscription.ts b/apps/sim/hooks/queries/subscription.ts
index 89ded91231..b0e40ef6c9 100644
--- a/apps/sim/hooks/queries/subscription.ts
+++ b/apps/sim/hooks/queries/subscription.ts
@@ -28,6 +28,8 @@ async function fetchSubscriptionData(includeOrg = false) {
interface UseSubscriptionDataOptions {
/** Include organization membership and role data */
includeOrg?: boolean
+ /** Whether to enable the query (defaults to true) */
+ enabled?: boolean
}
/**
@@ -35,13 +37,14 @@ interface UseSubscriptionDataOptions {
* @param options - Optional configuration
*/
export function useSubscriptionData(options: UseSubscriptionDataOptions = {}) {
- const { includeOrg = false } = options
+ const { includeOrg = false, enabled = true } = options
return useQuery({
queryKey: subscriptionKeys.user(includeOrg),
queryFn: () => fetchSubscriptionData(includeOrg),
staleTime: 30 * 1000,
placeholderData: keepPreviousData,
+ enabled,
})
}
@@ -58,17 +61,25 @@ async function fetchUsageLimitData() {
return response.json()
}
+interface UseUsageLimitDataOptions {
+ /** Whether to enable the query (defaults to true) */
+ enabled?: boolean
+}
+
/**
* Hook to fetch usage limit metadata
* Returns: currentLimit, minimumLimit, canEdit, plan, updatedAt
* Use this for editing usage limits, not for displaying current usage
*/
-export function useUsageLimitData() {
+export function useUsageLimitData(options: UseUsageLimitDataOptions = {}) {
+ const { enabled = true } = options
+
return useQuery({
queryKey: subscriptionKeys.usage(),
queryFn: fetchUsageLimitData,
staleTime: 30 * 1000,
placeholderData: keepPreviousData,
+ enabled,
})
}
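
A usage sketch for the new `enabled` flags, deferring both queries until a panel is visible (the component and `isOpen` prop are illustrative; the import path assumes the repo's `@/` alias for this file):

```ts
import { useSubscriptionData, useUsageLimitData } from '@/hooks/queries/subscription'

function BillingSettings({ isOpen }: { isOpen: boolean }) {
  // Neither request fires until the settings panel is actually open.
  const subscriptionQuery = useSubscriptionData({ includeOrg: true, enabled: isOpen })
  const usageLimitQuery = useUsageLimitData({ enabled: isOpen })

  if (!isOpen || subscriptionQuery.isLoading || usageLimitQuery.isLoading) return null
  // ...render with subscriptionQuery.data and usageLimitQuery.data
  return null
}
```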
diff --git a/apps/sim/hooks/use-code-viewer.ts b/apps/sim/hooks/use-code-viewer.ts
new file mode 100644
index 0000000000..52d0300970
--- /dev/null
+++ b/apps/sim/hooks/use-code-viewer.ts
@@ -0,0 +1,155 @@
+'use client'
+
+import { useCallback, useEffect, useRef, useState } from 'react'
+
+interface UseCodeViewerFeaturesOptions {
+ /** Reference to the content container for scroll-to-match functionality */
+ contentRef?: React.RefObject<HTMLElement>
+ /** Initial wrap text state (ignored if externalWrapText is provided) */
+ initialWrapText?: boolean
+ /** External wrap text state (e.g., from Zustand store) */
+ externalWrapText?: boolean
+ /** External setter for wrap text (required if externalWrapText is provided) */
+ onWrapTextChange?: (wrap: boolean) => void
+ /** Callback when escape is pressed (optional, for custom handling) */
+ onEscape?: () => void
+}
+
+interface UseCodeViewerFeaturesReturn {
+ wrapText: boolean
+ setWrapText: (wrap: boolean) => void
+ toggleWrapText: () => void
+
+ isSearchActive: boolean
+ searchQuery: string
+ setSearchQuery: (query: string) => void
+ matchCount: number
+ currentMatchIndex: number
+ activateSearch: () => void
+ closeSearch: () => void
+ goToNextMatch: () => void
+ goToPreviousMatch: () => void
+ handleMatchCountChange: (count: number) => void
+ searchInputRef: React.RefObject<HTMLInputElement>
+}
+
+/**
+ * Reusable hook for Code.Viewer features: search and wrap text functionality.
+ * Supports both internal state and external state (e.g., from Zustand) for wrapText.
+ */
+export function useCodeViewerFeatures(
+ options: UseCodeViewerFeaturesOptions = {}
+): UseCodeViewerFeaturesReturn {
+ const {
+ contentRef,
+ initialWrapText = true,
+ externalWrapText,
+ onWrapTextChange,
+ onEscape,
+ } = options
+
+ // Use external state if provided, otherwise use internal state
+ const [internalWrapText, setInternalWrapText] = useState(initialWrapText)
+ const wrapText = externalWrapText !== undefined ? externalWrapText : internalWrapText
+ const setWrapText = onWrapTextChange ?? setInternalWrapText
+
+ const [isSearchActive, setIsSearchActive] = useState(false)
+ const [searchQuery, setSearchQuery] = useState('')
+ const [matchCount, setMatchCount] = useState(0)
+ const [currentMatchIndex, setCurrentMatchIndex] = useState(0)
+ const searchInputRef = useRef<HTMLInputElement>(null)
+
+ const toggleWrapText = useCallback(() => {
+ setWrapText(!wrapText)
+ }, [wrapText, setWrapText])
+
+ const activateSearch = useCallback(() => {
+ setIsSearchActive(true)
+ setTimeout(() => {
+ searchInputRef.current?.focus()
+ }, 0)
+ }, [])
+
+ const closeSearch = useCallback(() => {
+ setIsSearchActive(false)
+ setSearchQuery('')
+ setMatchCount(0)
+ setCurrentMatchIndex(0)
+ }, [])
+
+ const goToNextMatch = useCallback(() => {
+ if (matchCount === 0) return
+ setCurrentMatchIndex((prev) => (prev + 1) % matchCount)
+ }, [matchCount])
+
+ const goToPreviousMatch = useCallback(() => {
+ if (matchCount === 0) return
+ setCurrentMatchIndex((prev) => (prev - 1 + matchCount) % matchCount)
+ }, [matchCount])
+
+ const handleMatchCountChange = useCallback((count: number) => {
+ setMatchCount(count)
+ setCurrentMatchIndex(0)
+ }, [])
+
+ useEffect(() => {
+ const handleKeyDown = (e: KeyboardEvent) => {
+ if (e.key === 'Escape' && isSearchActive) {
+ e.preventDefault()
+ closeSearch()
+ onEscape?.()
+ }
+ }
+
+ window.addEventListener('keydown', handleKeyDown)
+ return () => window.removeEventListener('keydown', handleKeyDown)
+ }, [isSearchActive, closeSearch, onEscape])
+
+ useEffect(() => {
+ const handleKeyDown = (e: KeyboardEvent) => {
+ if (!isSearchActive) return
+
+ const isSearchInputFocused = document.activeElement === searchInputRef.current
+
+ if (e.key === 'Enter' && isSearchInputFocused && matchCount > 0) {
+ e.preventDefault()
+ if (e.shiftKey) {
+ goToPreviousMatch()
+ } else {
+ goToNextMatch()
+ }
+ }
+ }
+
+ window.addEventListener('keydown', handleKeyDown)
+ return () => window.removeEventListener('keydown', handleKeyDown)
+ }, [isSearchActive, matchCount, goToNextMatch, goToPreviousMatch])
+
+ useEffect(() => {
+ if (!isSearchActive || matchCount === 0 || !contentRef?.current) return
+
+ const matchElements = contentRef.current.querySelectorAll('[data-search-match]')
+ const currentElement = matchElements[currentMatchIndex]
+
+ if (currentElement) {
+ currentElement.scrollIntoView({ block: 'center' })
+ }
+ }, [currentMatchIndex, isSearchActive, matchCount, contentRef])
+
+ return {
+ wrapText,
+ setWrapText,
+ toggleWrapText,
+ isSearchActive,
+ searchQuery,
+ setSearchQuery,
+ matchCount,
+ currentMatchIndex,
+ activateSearch,
+ closeSearch,
+ goToNextMatch,
+ goToPreviousMatch,
+ handleMatchCountChange,
+ searchInputRef,
+ }
+}
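
A consumer sketch for the new hook, assuming the component renders code in a scrollable container and that match highlighting elsewhere tags each hit with `data-search-match` (component and prop names are illustrative):

```tsx
'use client'

import { useRef } from 'react'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'

function CodeViewer({ code }: { code: string }) {
  const contentRef = useRef<HTMLDivElement>(null)
  const viewer = useCodeViewerFeatures({ contentRef })

  return (
    <div>
      <button onClick={viewer.toggleWrapText}>
        {viewer.wrapText ? 'Disable wrap' : 'Enable wrap'}
      </button>
      <button onClick={viewer.activateSearch}>Search</button>
      {viewer.isSearchActive && (
        <input
          ref={viewer.searchInputRef}
          value={viewer.searchQuery}
          onChange={(e) => viewer.setSearchQuery(e.target.value)}
          placeholder={
            viewer.matchCount > 0
              ? `${viewer.currentMatchIndex + 1} of ${viewer.matchCount}`
              : 'Search'
          }
        />
      )}
      {/* Highlighted spans rendered here must carry data-search-match. */}
      <div ref={contentRef} style={{ whiteSpace: viewer.wrapText ? 'pre-wrap' : 'pre' }}>
        {code}
      </div>
    </div>
  )
}
```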
diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts
index 4df0e00f40..ba6fda4e1c 100644
--- a/apps/sim/hooks/use-collaborative-workflow.ts
+++ b/apps/sim/hooks/use-collaborative-workflow.ts
@@ -6,6 +6,17 @@ import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks'
import { useUndoRedo } from '@/hooks/use-undo-redo'
+import {
+ BLOCK_OPERATIONS,
+ BLOCKS_OPERATIONS,
+ EDGE_OPERATIONS,
+ EDGES_OPERATIONS,
+ OPERATION_TARGETS,
+ SUBBLOCK_OPERATIONS,
+ SUBFLOW_OPERATIONS,
+ VARIABLE_OPERATIONS,
+ WORKFLOW_OPERATIONS,
+} from '@/socket/constants'
import { useNotificationStore } from '@/stores/notifications'
import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store'
import { usePanelEditorStore } from '@/stores/panel/editor/store'
@@ -20,8 +31,6 @@ import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/wo
const logger = createLogger('CollaborativeWorkflow')
-const WEBHOOK_SUBBLOCK_FIELDS = ['webhookId', 'triggerPath']
-
export function useCollaborativeWorkflow() {
const undoRedo = useUndoRedo()
const isUndoRedoInProgress = useRef(false)
@@ -33,7 +42,7 @@ export function useCollaborativeWorkflow() {
const { blockId, before, after } = e.detail || {}
if (!blockId || !before || !after) return
if (isUndoRedoInProgress.current) return
- undoRedo.recordMove(blockId, before, after)
+ undoRedo.recordBatchMoveBlocks([{ blockId, before, after }])
}
const parentUpdateHandler = (e: any) => {
@@ -197,9 +206,9 @@ export function useCollaborativeWorkflow() {
isApplyingRemoteChange.current = true
try {
- if (target === 'block') {
+ if (target === OPERATION_TARGETS.BLOCK) {
switch (operation) {
- case 'update-position': {
+ case BLOCK_OPERATIONS.UPDATE_POSITION: {
const blockId = payload.id
if (!data.timestamp) {
@@ -227,22 +236,22 @@ export function useCollaborativeWorkflow() {
}
break
}
- case 'update-name':
+ case BLOCK_OPERATIONS.UPDATE_NAME:
workflowStore.updateBlockName(payload.id, payload.name)
break
- case 'toggle-enabled':
+ case BLOCK_OPERATIONS.TOGGLE_ENABLED:
workflowStore.toggleBlockEnabled(payload.id)
break
- case 'update-parent':
+ case BLOCK_OPERATIONS.UPDATE_PARENT:
workflowStore.updateParentId(payload.id, payload.parentId, payload.extent)
break
- case 'update-advanced-mode':
+ case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE:
workflowStore.setBlockAdvancedMode(payload.id, payload.advancedMode)
break
- case 'update-trigger-mode':
+ case BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE:
workflowStore.setBlockTriggerMode(payload.id, payload.triggerMode)
break
- case 'toggle-handles': {
+ case BLOCK_OPERATIONS.TOGGLE_HANDLES: {
const currentBlock = workflowStore.blocks[payload.id]
if (currentBlock && currentBlock.horizontalHandles !== payload.horizontalHandles) {
workflowStore.toggleBlockHandles(payload.id)
@@ -250,9 +259,9 @@ export function useCollaborativeWorkflow() {
break
}
}
- } else if (target === 'blocks') {
+ } else if (target === OPERATION_TARGETS.BLOCKS) {
switch (operation) {
- case 'batch-update-positions': {
+ case BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS: {
const { updates } = payload
if (Array.isArray(updates)) {
updates.forEach(({ id, position }: { id: string; position: Position }) => {
@@ -264,12 +273,12 @@ export function useCollaborativeWorkflow() {
break
}
}
- } else if (target === 'edge') {
+ } else if (target === OPERATION_TARGETS.EDGE) {
switch (operation) {
- case 'add':
+ case EDGE_OPERATIONS.ADD:
workflowStore.addEdge(payload as Edge)
break
- case 'remove': {
+ case EDGE_OPERATIONS.REMOVE: {
workflowStore.removeEdge(payload.id)
const updatedBlocks = useWorkflowStore.getState().blocks
@@ -290,9 +299,44 @@ export function useCollaborativeWorkflow() {
break
}
}
- } else if (target === 'subflow') {
+ } else if (target === OPERATION_TARGETS.EDGES) {
+ switch (operation) {
+ case EDGES_OPERATIONS.BATCH_REMOVE_EDGES: {
+ const { ids } = payload
+ if (Array.isArray(ids)) {
+ ids.forEach((id: string) => {
+ workflowStore.removeEdge(id)
+ })
+
+ const updatedBlocks = useWorkflowStore.getState().blocks
+ const updatedEdges = useWorkflowStore.getState().edges
+ const graph = {
+ blocksById: updatedBlocks,
+ edgesById: Object.fromEntries(updatedEdges.map((e) => [e.id, e])),
+ }
+
+ const undoRedoStore = useUndoRedoStore.getState()
+ const stackKeys = Object.keys(undoRedoStore.stacks)
+ stackKeys.forEach((key) => {
+ const [wfId, uId] = key.split(':')
+ if (wfId === activeWorkflowId) {
+ undoRedoStore.pruneInvalidEntries(wfId, uId, graph)
+ }
+ })
+ }
+ break
+ }
+ case EDGES_OPERATIONS.BATCH_ADD_EDGES: {
+ const { edges } = payload
+ if (Array.isArray(edges)) {
+ edges.forEach((edge: Edge) => workflowStore.addEdge(edge))
+ }
+ break
+ }
+ }
+ } else if (target === OPERATION_TARGETS.SUBFLOW) {
switch (operation) {
- case 'update':
+ case SUBFLOW_OPERATIONS.UPDATE:
// Handle subflow configuration updates (loop/parallel type changes, etc.)
if (payload.type === 'loop') {
const { config } = payload
@@ -325,9 +369,9 @@ export function useCollaborativeWorkflow() {
}
break
}
- } else if (target === 'variable') {
+ } else if (target === OPERATION_TARGETS.VARIABLE) {
switch (operation) {
- case 'add':
+ case VARIABLE_OPERATIONS.ADD:
variablesStore.addVariable(
{
workflowId: payload.workflowId,
@@ -338,7 +382,7 @@ export function useCollaborativeWorkflow() {
payload.id
)
break
- case 'variable-update':
+ case VARIABLE_OPERATIONS.UPDATE:
if (payload.field === 'name') {
variablesStore.updateVariable(payload.variableId, { name: payload.value })
} else if (payload.field === 'value') {
@@ -347,13 +391,13 @@ export function useCollaborativeWorkflow() {
variablesStore.updateVariable(payload.variableId, { type: payload.value })
}
break
- case 'remove':
+ case VARIABLE_OPERATIONS.REMOVE:
variablesStore.deleteVariable(payload.variableId)
break
}
- } else if (target === 'workflow') {
+ } else if (target === OPERATION_TARGETS.WORKFLOW) {
switch (operation) {
- case 'replace-state':
+ case WORKFLOW_OPERATIONS.REPLACE_STATE:
if (payload.state) {
logger.info('Received workflow state replacement from remote user', {
userId,
@@ -386,9 +430,9 @@ export function useCollaborativeWorkflow() {
}
}
- if (target === 'blocks') {
+ if (target === OPERATION_TARGETS.BLOCKS) {
switch (operation) {
- case 'batch-add-blocks': {
+ case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: {
const {
blocks,
edges,
@@ -456,7 +500,7 @@ export function useCollaborativeWorkflow() {
logger.info('Successfully applied batch-add-blocks from remote user')
break
}
- case 'batch-remove-blocks': {
+ case BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS: {
const { ids } = payload
logger.info('Received batch-remove-blocks from remote user', {
userId,
@@ -722,7 +766,12 @@ export function useCollaborativeWorkflow() {
)
const collaborativeBatchUpdatePositions = useCallback(
- (updates: Array<{ id: string; position: Position }>) => {
+ (
+ updates: Array<{ id: string; position: Position }>,
+ options?: {
+ previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
+ }
+ ) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
return
@@ -735,8 +784,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
- operation: 'batch-update-positions',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: { updates },
},
workflowId: activeWorkflowId || '',
@@ -746,8 +795,31 @@ export function useCollaborativeWorkflow() {
updates.forEach(({ id, position }) => {
workflowStore.updateBlockPosition(id, position)
})
+
+ if (options?.previousPositions && options.previousPositions.size > 0) {
+ const moves = updates
+ .filter((u) => options.previousPositions!.has(u.id))
+ .map((u) => {
+ const prev = options.previousPositions!.get(u.id)!
+ const block = workflowStore.blocks[u.id]
+ return {
+ blockId: u.id,
+ before: prev,
+ after: {
+ x: u.position.x,
+ y: u.position.y,
+ parentId: block?.data?.parentId,
+ },
+ }
+ })
+ .filter((m) => m.before.x !== m.after.x || m.before.y !== m.after.y)
+
+ if (moves.length > 0) {
+ undoRedo.recordBatchMoveBlocks(moves)
+ }
+ }
},
- [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore]
+ [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -781,65 +853,169 @@ export function useCollaborativeWorkflow() {
return { success: false, error: `Block name "${trimmedName}" already exists` }
}
- executeQueuedOperation('update-name', 'block', { id, name: trimmedName }, () => {
- const result = workflowStore.updateBlockName(id, trimmedName)
+ executeQueuedOperation(
+ BLOCK_OPERATIONS.UPDATE_NAME,
+ OPERATION_TARGETS.BLOCK,
+ { id, name: trimmedName },
+ () => {
+ const result = workflowStore.updateBlockName(id, trimmedName)
- if (result.success && result.changedSubblocks.length > 0) {
- logger.info('Emitting cascaded subblock updates from block rename', {
- blockId: id,
- newName: trimmedName,
- updateCount: result.changedSubblocks.length,
- })
+ if (result.success && result.changedSubblocks.length > 0) {
+ logger.info('Emitting cascaded subblock updates from block rename', {
+ blockId: id,
+ newName: trimmedName,
+ updateCount: result.changedSubblocks.length,
+ })
- result.changedSubblocks.forEach(
- ({
- blockId,
- subBlockId,
- newValue,
- }: {
- blockId: string
- subBlockId: string
- newValue: any
- }) => {
- const operationId = crypto.randomUUID()
- addToQueue({
- id: operationId,
- operation: {
- operation: 'subblock-update',
- target: 'subblock',
- payload: { blockId, subblockId: subBlockId, value: newValue },
- },
- workflowId: activeWorkflowId || '',
- userId: session?.user?.id || 'unknown',
- })
- }
- )
+ result.changedSubblocks.forEach(
+ ({
+ blockId,
+ subBlockId,
+ newValue,
+ }: {
+ blockId: string
+ subBlockId: string
+ newValue: any
+ }) => {
+ const operationId = crypto.randomUUID()
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: SUBBLOCK_OPERATIONS.UPDATE,
+ target: OPERATION_TARGETS.SUBBLOCK,
+ payload: { blockId, subblockId: subBlockId, value: newValue },
+ },
+ workflowId: activeWorkflowId || '',
+ userId: session?.user?.id || 'unknown',
+ })
+ }
+ )
+ }
}
- })
+ )
return { success: true }
},
[executeQueuedOperation, workflowStore, addToQueue, activeWorkflowId, session?.user?.id]
)
- const collaborativeToggleBlockEnabled = useCallback(
- (id: string) => {
- executeQueuedOperation('toggle-enabled', 'block', { id }, () =>
+ const collaborativeBatchToggleBlockEnabled = useCallback(
+ (ids: string[]) => {
+ if (ids.length === 0) return
+
+ const previousStates: Record<string, boolean> = {}
+ const validIds: string[] = []
+
+ for (const id of ids) {
+ const block = workflowStore.blocks[id]
+ if (block) {
+ previousStates[id] = block.enabled
+ validIds.push(id)
+ }
+ }
+
+ if (validIds.length === 0) return
+
+ const operationId = crypto.randomUUID()
+
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validIds, previousStates },
+ },
+ workflowId: activeWorkflowId || '',
+ userId: session?.user?.id || 'unknown',
+ })
+
+ for (const id of validIds) {
workflowStore.toggleBlockEnabled(id)
- )
+ }
+
+ undoRedo.recordBatchToggleEnabled(validIds, previousStates)
},
- [executeQueuedOperation, workflowStore]
+ [addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo]
)
const collaborativeUpdateParentId = useCallback(
(id: string, parentId: string, extent: 'parent') => {
- executeQueuedOperation('update-parent', 'block', { id, parentId, extent }, () =>
- workflowStore.updateParentId(id, parentId, extent)
+ executeQueuedOperation(
+ BLOCK_OPERATIONS.UPDATE_PARENT,
+ OPERATION_TARGETS.BLOCK,
+ { id, parentId, extent },
+ () => workflowStore.updateParentId(id, parentId, extent)
)
},
[executeQueuedOperation, workflowStore]
)
+ const collaborativeBatchUpdateParent = useCallback(
+ (
+ updates: Array<{
+ blockId: string
+ newParentId: string | null
+ newPosition: { x: number; y: number }
+ affectedEdges: Edge[]
+ }>
+ ) => {
+ if (!isInActiveRoom()) {
+ logger.debug('Skipping batch update parent - not in active workflow')
+ return
+ }
+
+ if (updates.length === 0) return
+
+ const batchUpdates = updates.map((u) => {
+ const block = workflowStore.blocks[u.blockId]
+ const oldParentId = block?.data?.parentId
+ const oldPosition = block?.position || { x: 0, y: 0 }
+
+ return {
+ blockId: u.blockId,
+ oldParentId,
+ newParentId: u.newParentId || undefined,
+ oldPosition,
+ newPosition: u.newPosition,
+ affectedEdges: u.affectedEdges,
+ }
+ })
+
+ for (const update of updates) {
+ if (update.affectedEdges.length > 0) {
+ update.affectedEdges.forEach((e) => workflowStore.removeEdge(e.id))
+ }
+ workflowStore.updateBlockPosition(update.blockId, update.newPosition)
+ if (update.newParentId) {
+ workflowStore.updateParentId(update.blockId, update.newParentId, 'parent')
+ }
+ }
+
+ undoRedo.recordBatchUpdateParent(batchUpdates)
+
+ const operationId = crypto.randomUUID()
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: {
+ updates: batchUpdates.map((u) => ({
+ id: u.blockId,
+ parentId: u.newParentId || '',
+ position: u.newPosition,
+ })),
+ },
+ },
+ workflowId: activeWorkflowId || '',
+ userId: session?.user?.id || 'unknown',
+ })
+
+ logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
+ },
+ [isInActiveRoom, workflowStore, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+ )
+
const collaborativeToggleBlockAdvancedMode = useCallback(
(id: string) => {
const currentBlock = workflowStore.blocks[id]
@@ -848,8 +1024,8 @@ export function useCollaborativeWorkflow() {
const newAdvancedMode = !currentBlock.advancedMode
executeQueuedOperation(
- 'update-advanced-mode',
- 'block',
+ BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
+ OPERATION_TARGETS.BLOCK,
{ id, advancedMode: newAdvancedMode },
() => workflowStore.toggleBlockAdvancedMode(id)
)
@@ -879,8 +1055,8 @@ export function useCollaborativeWorkflow() {
}
executeQueuedOperation(
- 'update-trigger-mode',
- 'block',
+ BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE,
+ OPERATION_TARGETS.BLOCK,
{ id, triggerMode: newTriggerMode },
() => workflowStore.toggleBlockTriggerMode(id)
)
@@ -888,27 +1064,50 @@ export function useCollaborativeWorkflow() {
[executeQueuedOperation, workflowStore]
)
- const collaborativeToggleBlockHandles = useCallback(
- (id: string) => {
- const currentBlock = workflowStore.blocks[id]
- if (!currentBlock) return
+ const collaborativeBatchToggleBlockHandles = useCallback(
+ (ids: string[]) => {
+ if (ids.length === 0) return
- const newHorizontalHandles = !currentBlock.horizontalHandles
+ const previousStates: Record<string, boolean> = {}
+ const validIds: string[] = []
- executeQueuedOperation(
- 'toggle-handles',
- 'block',
- { id, horizontalHandles: newHorizontalHandles },
- () => workflowStore.toggleBlockHandles(id)
- )
+ for (const id of ids) {
+ const block = workflowStore.blocks[id]
+ if (block) {
+ previousStates[id] = block.horizontalHandles ?? false
+ validIds.push(id)
+ }
+ }
+
+ if (validIds.length === 0) return
+
+ const operationId = crypto.randomUUID()
+
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validIds, previousStates },
+ },
+ workflowId: activeWorkflowId || '',
+ userId: session?.user?.id || 'unknown',
+ })
+
+ for (const id of validIds) {
+ workflowStore.toggleBlockHandles(id)
+ }
+
+ undoRedo.recordBatchToggleHandles(validIds, previousStates)
},
- [executeQueuedOperation, workflowStore]
+ [addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo]
)
const collaborativeAddEdge = useCallback(
(edge: Edge) => {
- executeQueuedOperation('add', 'edge', edge, () => workflowStore.addEdge(edge))
- // Only record edge addition if it's not part of a parent update operation
+ executeQueuedOperation(EDGE_OPERATIONS.ADD, OPERATION_TARGETS.EDGE, edge, () =>
+ workflowStore.addEdge(edge)
+ )
if (!skipEdgeRecording.current) {
undoRedo.recordAddEdge(edge.id)
}
@@ -920,13 +1119,11 @@ export function useCollaborativeWorkflow() {
(edgeId: string) => {
const edge = workflowStore.edges.find((e) => e.id === edgeId)
- // Skip if edge doesn't exist (already removed during cascade deletion)
if (!edge) {
logger.debug('Edge already removed, skipping operation', { edgeId })
return
}
- // Check if the edge's source and target blocks still exist
const sourceExists = workflowStore.blocks[edge.source]
const targetExists = workflowStore.blocks[edge.target]
@@ -939,23 +1136,75 @@ export function useCollaborativeWorkflow() {
return
}
- // Only record edge removal if it's not part of a parent update operation
if (!skipEdgeRecording.current) {
- undoRedo.recordRemoveEdge(edgeId, edge)
+ undoRedo.recordBatchRemoveEdges([edge])
}
- executeQueuedOperation('remove', 'edge', { id: edgeId }, () =>
+ executeQueuedOperation(EDGE_OPERATIONS.REMOVE, OPERATION_TARGETS.EDGE, { id: edgeId }, () =>
workflowStore.removeEdge(edgeId)
)
},
[executeQueuedOperation, workflowStore, undoRedo]
)
+ const collaborativeBatchRemoveEdges = useCallback(
+ (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
+ if (!isInActiveRoom()) {
+ logger.debug('Skipping batch remove edges - not in active workflow')
+ return false
+ }
+
+ if (edgeIds.length === 0) return false
+
+ const edgeSnapshots: Edge[] = []
+ const validEdgeIds: string[] = []
+
+ for (const edgeId of edgeIds) {
+ const edge = workflowStore.edges.find((e) => e.id === edgeId)
+ if (edge) {
+ const sourceExists = workflowStore.blocks[edge.source]
+ const targetExists = workflowStore.blocks[edge.target]
+ if (sourceExists && targetExists) {
+ edgeSnapshots.push(edge)
+ validEdgeIds.push(edgeId)
+ }
+ }
+ }
+
+ if (validEdgeIds.length === 0) {
+ logger.debug('No valid edges to remove')
+ return false
+ }
+
+ const operationId = crypto.randomUUID()
+
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { ids: validEdgeIds },
+ },
+ workflowId: activeWorkflowId || '',
+ userId: session?.user?.id || 'unknown',
+ })
+
+ validEdgeIds.forEach((id) => workflowStore.removeEdge(id))
+
+ if (!options?.skipUndoRedo && edgeSnapshots.length > 0) {
+ undoRedo.recordBatchRemoveEdges(edgeSnapshots)
+ }
+
+ logger.info('Batch removed edges', { count: validEdgeIds.length })
+ return true
+ },
+ [isInActiveRoom, workflowStore, addToQueue, activeWorkflowId, session, undoRedo]
+ )
+
const collaborativeSetSubblockValue = useCallback(
(blockId: string, subblockId: string, value: any, options?: { _visited?: Set<string> }) => {
if (isApplyingRemoteChange.current) return
- // Skip socket operations when viewing baseline diff
if (isBaselineDiffView) {
logger.debug('Skipping collaborative subblock update while viewing baseline diff')
return
@@ -971,28 +1220,23 @@ export function useCollaborativeWorkflow() {
return
}
- // Generate operation ID for queue tracking
const operationId = crypto.randomUUID()
- // Get fresh activeWorkflowId from store to avoid stale closure
const currentActiveWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
- // Add to queue for retry mechanism
addToQueue({
id: operationId,
operation: {
- operation: 'subblock-update',
- target: 'subblock',
+ operation: SUBBLOCK_OPERATIONS.UPDATE,
+ target: OPERATION_TARGETS.SUBBLOCK,
payload: { blockId, subblockId, value },
},
workflowId: currentActiveWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
- // Apply locally first (immediate UI feedback)
subBlockStore.setValue(blockId, subblockId, value)
- // Declarative clearing: clear sub-blocks that depend on this subblockId
try {
const visited = options?._visited || new Set()
if (visited.has(subblockId)) return
@@ -1004,9 +1248,7 @@ export function useCollaborativeWorkflow() {
(sb: any) => Array.isArray(sb.dependsOn) && sb.dependsOn.includes(subblockId)
)
for (const dep of dependents) {
- // Skip clearing if the dependent is the same field
if (!dep?.id || dep.id === subblockId) continue
- // Cascade using the same collaborative path so it emits and further cascades
collaborativeSetSubblockValue(blockId, dep.id, '', { _visited: visited })
}
}
@@ -1049,8 +1291,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
- operation: 'subblock-update',
- target: 'subblock',
+ operation: SUBBLOCK_OPERATIONS.UPDATE,
+ target: OPERATION_TARGETS.SUBBLOCK,
payload: { blockId, subblockId, value },
},
workflowId: activeWorkflowId || '',
@@ -1096,12 +1338,17 @@ export function useCollaborativeWorkflow() {
doWhileCondition: existingDoWhileCondition ?? '',
}
- executeQueuedOperation('update', 'subflow', { id: loopId, type: 'loop', config }, () => {
- workflowStore.updateLoopType(loopId, loopType)
- workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '')
- workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '')
- workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '')
- })
+ executeQueuedOperation(
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
+ { id: loopId, type: 'loop', config },
+ () => {
+ workflowStore.updateLoopType(loopId, loopType)
+ workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '')
+ workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '')
+ workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '')
+ }
+ )
},
[executeQueuedOperation, workflowStore]
)
@@ -1134,8 +1381,8 @@ export function useCollaborativeWorkflow() {
}
executeQueuedOperation(
- 'update',
- 'subflow',
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
{ id: parallelId, type: 'parallel', config },
() => {
workflowStore.updateParallelType(parallelId, parallelType)
@@ -1169,8 +1416,11 @@ export function useCollaborativeWorkflow() {
forEachItems: currentCollection,
}
- executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () =>
- workflowStore.updateLoopCount(nodeId, count)
+ executeQueuedOperation(
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
+ { id: nodeId, type: 'loop', config },
+ () => workflowStore.updateLoopCount(nodeId, count)
)
} else {
const currentDistribution = currentBlock.data?.collection || ''
@@ -1184,8 +1434,11 @@ export function useCollaborativeWorkflow() {
parallelType: currentParallelType,
}
- executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
- workflowStore.updateParallelCount(nodeId, count)
+ executeQueuedOperation(
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
+ { id: nodeId, type: 'parallel', config },
+ () => workflowStore.updateParallelCount(nodeId, count)
)
}
},
@@ -1230,11 +1483,16 @@ export function useCollaborativeWorkflow() {
doWhileCondition: nextDoWhileCondition ?? '',
}
- executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () => {
- workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '')
- workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '')
- workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '')
- })
+ executeQueuedOperation(
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
+ { id: nodeId, type: 'loop', config },
+ () => {
+ workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '')
+ workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '')
+ workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '')
+ }
+ )
} else {
const currentCount = currentBlock.data?.count || 5
const currentParallelType = currentBlock.data?.parallelType || 'count'
@@ -1247,8 +1505,11 @@ export function useCollaborativeWorkflow() {
parallelType: currentParallelType,
}
- executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
- workflowStore.updateParallelCollection(nodeId, collection)
+ executeQueuedOperation(
+ SUBFLOW_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.SUBFLOW,
+ { id: nodeId, type: 'parallel', config },
+ () => workflowStore.updateParallelCollection(nodeId, collection)
)
}
},
@@ -1257,15 +1518,20 @@ export function useCollaborativeWorkflow() {
const collaborativeUpdateVariable = useCallback(
(variableId: string, field: 'name' | 'value' | 'type', value: any) => {
- executeQueuedOperation('variable-update', 'variable', { variableId, field, value }, () => {
- if (field === 'name') {
- variablesStore.updateVariable(variableId, { name: value })
- } else if (field === 'value') {
- variablesStore.updateVariable(variableId, { value })
- } else if (field === 'type') {
- variablesStore.updateVariable(variableId, { type: value })
+ executeQueuedOperation(
+ VARIABLE_OPERATIONS.UPDATE,
+ OPERATION_TARGETS.VARIABLE,
+ { variableId, field, value },
+ () => {
+ if (field === 'name') {
+ variablesStore.updateVariable(variableId, { name: value })
+ } else if (field === 'value') {
+ variablesStore.updateVariable(variableId, { value })
+ } else if (field === 'type') {
+ variablesStore.updateVariable(variableId, { type: value })
+ }
}
- })
+ )
},
[executeQueuedOperation, variablesStore]
)
@@ -1287,7 +1553,12 @@ export function useCollaborativeWorkflow() {
// Queue operation with processed name for server & other clients
// Empty callback because local store is already updated above
- executeQueuedOperation('add', 'variable', payloadWithProcessedName, () => {})
+ executeQueuedOperation(
+ VARIABLE_OPERATIONS.ADD,
+ OPERATION_TARGETS.VARIABLE,
+ payloadWithProcessedName,
+ () => {}
+ )
}
return id
@@ -1299,9 +1570,14 @@ export function useCollaborativeWorkflow() {
(variableId: string) => {
cancelOperationsForVariable(variableId)
- executeQueuedOperation('remove', 'variable', { variableId }, () => {
- variablesStore.deleteVariable(variableId)
- })
+ executeQueuedOperation(
+ VARIABLE_OPERATIONS.REMOVE,
+ OPERATION_TARGETS.VARIABLE,
+ { variableId },
+ () => {
+ variablesStore.deleteVariable(variableId)
+ }
+ )
},
[executeQueuedOperation, variablesStore, cancelOperationsForVariable]
)
@@ -1337,8 +1613,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
- operation: 'batch-add-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: { blocks, edges, loops, parallels, subBlockValues },
},
workflowId: activeWorkflowId || '',
@@ -1469,8 +1745,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
- operation: 'batch-remove-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: { ids: Array.from(allBlocksToRemove) },
},
workflowId: activeWorkflowId || '',
@@ -1512,15 +1788,17 @@ export function useCollaborativeWorkflow() {
// Collaborative operations
collaborativeBatchUpdatePositions,
collaborativeUpdateBlockName,
- collaborativeToggleBlockEnabled,
+ collaborativeBatchToggleBlockEnabled,
collaborativeUpdateParentId,
+ collaborativeBatchUpdateParent,
collaborativeToggleBlockAdvancedMode,
collaborativeToggleBlockTriggerMode,
- collaborativeToggleBlockHandles,
+ collaborativeBatchToggleBlockHandles,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeAddEdge,
collaborativeRemoveEdge,
+ collaborativeBatchRemoveEdges,
collaborativeSetSubblockValue,
collaborativeSetTagSelection,
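
A caller sketch for the widened `collaborativeBatchUpdatePositions` signature: snapshot positions when a drag starts, pass them on drop, and the hook records one undo entry for the whole selection (the `useDragMoves` wrapper and handler names are illustrative):

```ts
import { useRef } from 'react'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'

function useDragMoves() {
  const { collaborativeBatchUpdatePositions } = useCollaborativeWorkflow()
  const dragStart = useRef(new Map<string, { x: number; y: number; parentId?: string }>())

  const onDragStart = (nodes: Array<{ id: string; position: { x: number; y: number } }>) => {
    dragStart.current.clear()
    for (const n of nodes) dragStart.current.set(n.id, { x: n.position.x, y: n.position.y })
  }

  const onDragStop = (nodes: Array<{ id: string; position: { x: number; y: number } }>) => {
    // One socket operation plus one undo entry for the whole selection.
    collaborativeBatchUpdatePositions(
      nodes.map((n) => ({ id: n.id, position: n.position })),
      { previousPositions: dragStart.current }
    )
  }

  return { onDragStart, onDragStop }
}
```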
diff --git a/apps/sim/hooks/use-forwarded-ref.ts b/apps/sim/hooks/use-forwarded-ref.ts
deleted file mode 100644
index 70bbc4ad37..0000000000
--- a/apps/sim/hooks/use-forwarded-ref.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import { type MutableRefObject, useEffect, useRef } from 'react'
-
-/**
- * A hook that handles forwarded refs and returns a mutable ref object
- * Useful for components that need both a forwarded ref and a local ref
- * @param forwardedRef The forwarded ref from React.forwardRef
- * @returns A mutable ref object that can be used locally
- */
-export function useForwardedRef<T>(
-  forwardedRef: React.ForwardedRef<T>
-): MutableRefObject<T | null> {
-  const innerRef = useRef<T | null>(null)
-
- useEffect(() => {
- if (!forwardedRef) return
-
- if (typeof forwardedRef === 'function') {
- forwardedRef(innerRef.current)
- } else {
- forwardedRef.current = innerRef.current
- }
- }, [forwardedRef])
-
- return innerRef
-}
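
With `useForwardedRef` removed, callers that still need both a local and a forwarded ref can fall back on a merge-callback pattern; a generic sketch (not the repo's actual replacement):

```ts
import { type MutableRefObject, type Ref, useCallback } from 'react'

// Fans a node out to any number of refs (callback refs or ref objects).
function useMergedRefs<T>(...refs: Array<Ref<T> | undefined>) {
  return useCallback((node: T | null) => {
    for (const ref of refs) {
      if (typeof ref === 'function') ref(node)
      else if (ref) (ref as MutableRefObject<T | null>).current = node
    }
    // The refs array itself doubles as the dependency list.
  }, refs)
}
```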
diff --git a/apps/sim/hooks/use-subscription-state.ts b/apps/sim/hooks/use-subscription-state.ts
deleted file mode 100644
index 5bb52ad135..0000000000
--- a/apps/sim/hooks/use-subscription-state.ts
+++ /dev/null
@@ -1,217 +0,0 @@
-import { useCallback, useEffect, useState } from 'react'
-import { createLogger } from '@sim/logger'
-import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants'
-
-const logger = createLogger('useSubscriptionState')
-
-interface UsageData {
- current: number
- limit: number
- percentUsed: number
- isWarning: boolean
- isExceeded: boolean
- billingPeriodStart: Date | null
- billingPeriodEnd: Date | null
- lastPeriodCost: number
-}
-
-interface SubscriptionState {
- isPaid: boolean
- isPro: boolean
- isTeam: boolean
- isEnterprise: boolean
- plan: string
- status: string | null
- seats: number | null
- metadata: any | null
- usage: UsageData
-}
-
-/**
- * Consolidated hook for subscription state management
- * Combines subscription status, features, and usage data
- */
-export function useSubscriptionState() {
- const [data, setData] = useState<SubscriptionState | null>(null)
- const [isLoading, setIsLoading] = useState(true)
- const [error, setError] = useState<Error | null>(null)
-
- const fetchSubscriptionState = useCallback(async () => {
- try {
- setIsLoading(true)
- setError(null)
-
- const response = await fetch('/api/billing?context=user')
-
- if (!response.ok) {
- throw new Error(`HTTP error! status: ${response.status}`)
- }
-
- const result = await response.json()
- const subscriptionData = result.data
- setData(subscriptionData)
- } catch (error) {
- const err = error instanceof Error ? error : new Error('Failed to fetch subscription state')
- logger.error('Failed to fetch subscription state', { error })
- setError(err)
- } finally {
- setIsLoading(false)
- }
- }, [])
-
- useEffect(() => {
- fetchSubscriptionState()
- }, [fetchSubscriptionState])
-
- const refetch = useCallback(() => {
- return fetchSubscriptionState()
- }, [fetchSubscriptionState])
-
- return {
- subscription: {
- isPaid: data?.isPaid ?? false,
- isPro: data?.isPro ?? false,
- isTeam: data?.isTeam ?? false,
- isEnterprise: data?.isEnterprise ?? false,
- isFree: !(data?.isPaid ?? false),
- plan: data?.plan ?? 'free',
- status: data?.status,
- seats: data?.seats,
- metadata: data?.metadata,
- },
-
- usage: {
- current: data?.usage?.current ?? 0,
- limit: data?.usage?.limit ?? DEFAULT_FREE_CREDITS,
- percentUsed: data?.usage?.percentUsed ?? 0,
- isWarning: data?.usage?.isWarning ?? false,
- isExceeded: data?.usage?.isExceeded ?? false,
- billingPeriodStart: data?.usage?.billingPeriodStart
- ? new Date(data.usage.billingPeriodStart)
- : null,
- billingPeriodEnd: data?.usage?.billingPeriodEnd
- ? new Date(data.usage.billingPeriodEnd)
- : null,
- lastPeriodCost: data?.usage?.lastPeriodCost ?? 0,
- },
-
- isLoading,
- error,
- refetch,
-
- isAtLeastPro: () => {
- return data?.isPro || data?.isTeam || data?.isEnterprise || false
- },
-
- isAtLeastTeam: () => {
- return data?.isTeam || data?.isEnterprise || false
- },
-
- canUpgrade: () => {
- return data?.plan === 'free' || data?.plan === 'pro'
- },
-
- getBillingStatus: () => {
- const usage = data?.usage
- if (!usage) return 'unknown'
-
- if (usage.isExceeded) return 'exceeded'
- if (usage.isWarning) return 'warning'
- return 'ok'
- },
-
- getRemainingBudget: () => {
- const usage = data?.usage
- if (!usage) return 0
- return Math.max(0, usage.limit - usage.current)
- },
-
- getDaysRemainingInPeriod: () => {
- const usage = data?.usage
- if (!usage?.billingPeriodEnd) return null
-
- const now = new Date()
- const endDate = new Date(usage.billingPeriodEnd)
- const diffTime = endDate.getTime() - now.getTime()
- const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24))
-
- return Math.max(0, diffDays)
- },
- }
-}
-
-/**
- * Hook for usage limit information with editing capabilities
- */
-export function useUsageLimit() {
- const [data, setData] = useState<any>(null)
- const [isLoading, setIsLoading] = useState(true)
- const [error, setError] = useState<Error | null>(null)
-
- const fetchUsageLimit = useCallback(async () => {
- try {
- setIsLoading(true)
- setError(null)
-
- const response = await fetch('/api/usage?context=user')
-
- if (!response.ok) {
- throw new Error(`HTTP error! status: ${response.status}`)
- }
-
- const limitData = await response.json()
- setData(limitData)
- } catch (error) {
- const err = error instanceof Error ? error : new Error('Failed to fetch usage limit')
- logger.error('Failed to fetch usage limit', { error })
- setError(err)
- } finally {
- setIsLoading(false)
- }
- }, [])
-
- useEffect(() => {
- fetchUsageLimit()
- }, [fetchUsageLimit])
-
- const refetch = useCallback(() => {
- return fetchUsageLimit()
- }, [fetchUsageLimit])
-
- const updateLimit = async (newLimit: number) => {
- try {
- const response = await fetch('/api/usage?context=user', {
- method: 'PUT',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify({ limit: newLimit }),
- })
-
- if (!response.ok) {
- const errorData = await response.json()
- throw new Error(errorData.error || 'Failed to update usage limit')
- }
-
- await refetch()
-
- return { success: true }
- } catch (error) {
- logger.error('Failed to update usage limit', { error, newLimit })
- throw error
- }
- }
-
- return {
- currentLimit: data?.currentLimit ?? DEFAULT_FREE_CREDITS,
- canEdit: data?.canEdit ?? false,
- minimumLimit: data?.minimumLimit ?? DEFAULT_FREE_CREDITS,
- plan: data?.plan ?? 'free',
- setBy: data?.setBy,
- updatedAt: data?.updatedAt ? new Date(data.updatedAt) : null,
- updateLimit,
- isLoading,
- error,
- refetch,
- }
-}
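
Both deleted hooks are superseded by the React Query hooks in `apps/sim/hooks/queries/subscription.ts`. A migration sketch, assuming the query returns the same billing payload the old hook read from `/api/billing?context=user` (an assumption; the fetcher body is not shown in this diff):

```ts
import { useSubscriptionData, useUsageLimitData } from '@/hooks/queries/subscription'

// Before (deleted):
//   const { subscription, usage, isLoading, refetch } = useSubscriptionState()
//   const { currentLimit, canEdit, updateLimit } = useUsageLimit()

// After: caching, deduping, and background refetching come from React Query.
const { data, isLoading, refetch } = useSubscriptionData()
const usageLimitQuery = useUsageLimitData()
```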
diff --git a/apps/sim/hooks/use-undo-redo.ts b/apps/sim/hooks/use-undo-redo.ts
index 33457cf390..740b50293b 100644
--- a/apps/sim/hooks/use-undo-redo.ts
+++ b/apps/sim/hooks/use-undo-redo.ts
@@ -3,14 +3,25 @@ import { createLogger } from '@sim/logger'
import type { Edge } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
+import {
+ BLOCK_OPERATIONS,
+ BLOCKS_OPERATIONS,
+ EDGE_OPERATIONS,
+ EDGES_OPERATIONS,
+ OPERATION_TARGETS,
+ UNDO_REDO_OPERATIONS,
+} from '@/socket/constants'
import { useOperationQueue } from '@/stores/operation-queue/store'
import {
type BatchAddBlocksOperation,
+ type BatchAddEdgesOperation,
+ type BatchMoveBlocksOperation,
type BatchRemoveBlocksOperation,
+ type BatchRemoveEdgesOperation,
+ type BatchToggleEnabledOperation,
+ type BatchToggleHandlesOperation,
+ type BatchUpdateParentOperation,
createOperationEntry,
- type MoveBlockOperation,
- type Operation,
- type RemoveEdgeOperation,
runWithUndoRedoRecordingSuspended,
type UpdateParentOperation,
useUndoRedoStore,
@@ -42,7 +53,7 @@ export function useUndoRedo() {
const operation: BatchAddBlocksOperation = {
id: crypto.randomUUID(),
- type: 'batch-add-blocks',
+ type: UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -55,7 +66,7 @@ export function useUndoRedo() {
const inverse: BatchRemoveBlocksOperation = {
id: crypto.randomUUID(),
- type: 'batch-remove-blocks',
+ type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -88,7 +99,7 @@ export function useUndoRedo() {
const operation: BatchRemoveBlocksOperation = {
id: crypto.randomUUID(),
- type: 'batch-remove-blocks',
+ type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -101,7 +112,7 @@ export function useUndoRedo() {
const inverse: BatchAddBlocksOperation = {
id: crypto.randomUUID(),
- type: 'batch-add-blocks',
+ type: UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -128,25 +139,28 @@ export function useUndoRedo() {
(edgeId: string) => {
if (!activeWorkflowId) return
- const operation: Operation = {
+ const edgeSnapshot = workflowStore.edges.find((e) => e.id === edgeId)
+ if (!edgeSnapshot) {
+ logger.warn('Edge not found when recording add edge', { edgeId })
+ return
+ }
+
+ const operation: BatchAddEdgesOperation = {
id: crypto.randomUUID(),
- type: 'add-edge',
+ type: UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
- data: { edgeId },
+ data: { edgeSnapshots: [edgeSnapshot] },
}
- const inverse: RemoveEdgeOperation = {
+ const inverse: BatchRemoveEdgesOperation = {
id: crypto.randomUUID(),
- type: 'remove-edge',
+ type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
- data: {
- edgeId,
- edgeSnapshot: workflowStore.edges.find((e) => e.id === edgeId) || null,
- },
+ data: { edgeSnapshots: [edgeSnapshot] },
}
const entry = createOperationEntry(operation, inverse)
@@ -157,77 +171,81 @@ export function useUndoRedo() {
[activeWorkflowId, userId, workflowStore, undoRedoStore]
)
- const recordRemoveEdge = useCallback(
- (edgeId: string, edgeSnapshot: Edge) => {
- if (!activeWorkflowId) return
+ const recordBatchRemoveEdges = useCallback(
+ (edgeSnapshots: Edge[]) => {
+ if (!activeWorkflowId || edgeSnapshots.length === 0) return
- const operation: RemoveEdgeOperation = {
+ const operation: BatchRemoveEdgesOperation = {
id: crypto.randomUUID(),
- type: 'remove-edge',
+ type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
- edgeId,
- edgeSnapshot,
+ edgeSnapshots,
},
}
- const inverse: Operation = {
+ const inverse: BatchAddEdgesOperation = {
id: crypto.randomUUID(),
- type: 'add-edge',
+ type: UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
- data: { edgeId },
+ data: {
+ edgeSnapshots,
+ },
}
const entry = createOperationEntry(operation, inverse)
undoRedoStore.push(activeWorkflowId, userId, entry)
- logger.debug('Recorded remove edge', { edgeId, workflowId: activeWorkflowId })
+ logger.debug('Recorded batch remove edges', {
+ edgeCount: edgeSnapshots.length,
+ workflowId: activeWorkflowId,
+ })
},
[activeWorkflowId, userId, undoRedoStore]
)
- const recordMove = useCallback(
+ const recordBatchMoveBlocks = useCallback(
(
- blockId: string,
- before: { x: number; y: number; parentId?: string },
- after: { x: number; y: number; parentId?: string }
+ moves: Array<{
+ blockId: string
+ before: { x: number; y: number; parentId?: string }
+ after: { x: number; y: number; parentId?: string }
+ }>
) => {
- if (!activeWorkflowId) return
+ if (!activeWorkflowId || moves.length === 0) return
- const operation: MoveBlockOperation = {
+ const operation: BatchMoveBlocksOperation = {
id: crypto.randomUUID(),
- type: 'move-block',
+ type: UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
- data: {
- blockId,
- before,
- after,
- },
+ data: { moves },
}
- const inverse: MoveBlockOperation = {
+ const inverse: BatchMoveBlocksOperation = {
id: crypto.randomUUID(),
- type: 'move-block',
+ type: UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
- blockId,
- before: after,
- after: before,
+ moves: moves.map((m) => ({
+ blockId: m.blockId,
+ before: m.after,
+ after: m.before,
+ })),
},
}
const entry = createOperationEntry(operation, inverse)
undoRedoStore.push(activeWorkflowId, userId, entry)
- logger.debug('Recorded move', { blockId, from: before, to: after })
+ logger.debug('Recorded batch move', { blockCount: moves.length })
},
[activeWorkflowId, userId, undoRedoStore]
)
@@ -245,7 +263,7 @@ export function useUndoRedo() {
const operation: UpdateParentOperation = {
id: crypto.randomUUID(),
- type: 'update-parent',
+ type: UNDO_REDO_OPERATIONS.UPDATE_PARENT,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -261,7 +279,7 @@ export function useUndoRedo() {
const inverse: UpdateParentOperation = {
id: crypto.randomUUID(),
- type: 'update-parent',
+ type: UNDO_REDO_OPERATIONS.UPDATE_PARENT,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -271,7 +289,7 @@ export function useUndoRedo() {
newParentId: oldParentId,
oldPosition: newPosition,
newPosition: oldPosition,
- affectedEdges, // Same edges need to be restored
+ affectedEdges,
},
}
@@ -288,6 +306,117 @@ export function useUndoRedo() {
[activeWorkflowId, userId, undoRedoStore]
)
+ const recordBatchUpdateParent = useCallback(
+ (
+ updates: Array<{
+ blockId: string
+ oldParentId?: string
+ newParentId?: string
+ oldPosition: { x: number; y: number }
+ newPosition: { x: number; y: number }
+ affectedEdges?: Edge[]
+ }>
+ ) => {
+ if (!activeWorkflowId || updates.length === 0) return
+
+ const operation: BatchUpdateParentOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: { updates },
+ }
+
+ const inverse: BatchUpdateParentOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: {
+ updates: updates.map((u) => ({
+ blockId: u.blockId,
+ oldParentId: u.newParentId,
+ newParentId: u.oldParentId,
+ oldPosition: u.newPosition,
+ newPosition: u.oldPosition,
+ affectedEdges: u.affectedEdges,
+ })),
+ },
+ }
+
+ const entry = createOperationEntry(operation, inverse)
+ undoRedoStore.push(activeWorkflowId, userId, entry)
+
+ logger.debug('Recorded batch update parent', {
+ updateCount: updates.length,
+ workflowId: activeWorkflowId,
+ })
+ },
+ [activeWorkflowId, userId, undoRedoStore]
+ )
+
+ const recordBatchToggleEnabled = useCallback(
+ (blockIds: string[], previousStates: Record<string, boolean>) => {
+ if (!activeWorkflowId || blockIds.length === 0) return
+
+ const operation: BatchToggleEnabledOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: { blockIds, previousStates },
+ }
+
+ const inverse: BatchToggleEnabledOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: { blockIds, previousStates },
+ }
+
+ const entry = createOperationEntry(operation, inverse)
+ undoRedoStore.push(activeWorkflowId, userId, entry)
+
+ logger.debug('Recorded batch toggle enabled', { blockIds, previousStates })
+ },
+ [activeWorkflowId, userId, undoRedoStore]
+ )
+
+ const recordBatchToggleHandles = useCallback(
+ (blockIds: string[], previousStates: Record<string, boolean>) => {
+ if (!activeWorkflowId || blockIds.length === 0) return
+
+ const operation: BatchToggleHandlesOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: { blockIds, previousStates },
+ }
+
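+ // Operation and inverse carry identical data; undo restores previousStates while redo applies its negation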
+ const inverse: BatchToggleHandlesOperation = {
+ id: crypto.randomUUID(),
+ type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES,
+ timestamp: Date.now(),
+ workflowId: activeWorkflowId,
+ userId,
+ data: { blockIds, previousStates },
+ }
+
+ const entry = createOperationEntry(operation, inverse)
+ undoRedoStore.push(activeWorkflowId, userId, entry)
+
+ logger.debug('Recorded batch toggle handles', { blockIds, previousStates })
+ },
+ [activeWorkflowId, userId, undoRedoStore]
+ )
+
const undo = useCallback(async () => {
if (!activeWorkflowId) return
@@ -307,7 +436,7 @@ export function useUndoRedo() {
const opId = crypto.randomUUID()
switch (entry.inverse.type) {
- case 'batch-remove-blocks': {
+ case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: {
const batchRemoveOp = entry.inverse as BatchRemoveBlocksOperation
const { blockSnapshots } = batchRemoveOp.data
const blockIds = blockSnapshots.map((b) => b.id)
@@ -344,8 +473,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'batch-remove-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: { ids: existingBlockIds },
},
workflowId: activeWorkflowId,
@@ -355,8 +484,9 @@ export function useUndoRedo() {
existingBlockIds.forEach((id) => workflowStore.removeBlock(id))
break
}
- case 'batch-add-blocks': {
- const batchAddOp = entry.operation as BatchAddBlocksOperation
+ case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: {
+ // Undoing a removal: inverse is batch-add-blocks, use entry.inverse for data
+ const batchAddOp = entry.inverse as BatchAddBlocksOperation
const { blockSnapshots, edgeSnapshots, subBlockValues } = batchAddOp.data
const blocksToAdd = blockSnapshots.filter((b) => !workflowStore.blocks[b.id])
@@ -368,8 +498,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'batch-add-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: {
blocks: blocksToAdd,
edges: edgeSnapshots || [],
@@ -422,127 +552,117 @@ export function useUndoRedo() {
}
break
}
- case 'remove-edge': {
- const removeEdgeInverse = entry.inverse as RemoveEdgeOperation
- const { edgeId } = removeEdgeInverse.data
- if (workflowStore.edges.find((e) => e.id === edgeId)) {
+ case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: {
+ // Undo batch-add-edges: inverse is batch-remove-edges, so remove the edges
+ const batchRemoveInverse = entry.inverse as BatchRemoveEdgesOperation
+ const { edgeSnapshots } = batchRemoveInverse.data
+
+ const edgesToRemove = edgeSnapshots
+ .filter((e) => workflowStore.edges.find((edge) => edge.id === e.id))
+ .map((e) => e.id)
+
+ if (edgesToRemove.length > 0) {
addToQueue({
id: opId,
operation: {
- operation: 'remove',
- target: 'edge',
- payload: {
- id: edgeId,
- isUndo: true,
- originalOpId: entry.id,
- },
+ operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { ids: edgesToRemove },
},
workflowId: activeWorkflowId,
userId,
})
- workflowStore.removeEdge(edgeId)
- } else {
- logger.debug('Undo remove-edge skipped; edge missing', {
- edgeId,
- })
+ edgesToRemove.forEach((id) => workflowStore.removeEdge(id))
}
+ logger.debug('Undid batch-add-edges', { edgeCount: edgesToRemove.length })
break
}
- case 'add-edge': {
- const originalOp = entry.operation as RemoveEdgeOperation
- const { edgeSnapshot } = originalOp.data
- // Skip if snapshot missing or already exists
- if (!edgeSnapshot || workflowStore.edges.find((e) => e.id === edgeSnapshot.id)) {
- logger.debug('Undo add-edge skipped', {
- hasSnapshot: Boolean(edgeSnapshot),
+ case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: {
+ // Undo batch-remove-edges: inverse is batch-add-edges, so add edges back
+ const batchAddInverse = entry.inverse as BatchAddEdgesOperation
+ const { edgeSnapshots } = batchAddInverse.data
+
+ const edgesToAdd = edgeSnapshots.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+
+ if (edgesToAdd.length > 0) {
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
+ },
+ workflowId: activeWorkflowId,
+ userId,
})
- break
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
}
- addToQueue({
- id: opId,
- operation: {
- operation: 'add',
- target: 'edge',
- payload: { ...edgeSnapshot, isUndo: true, originalOpId: entry.id },
- },
- workflowId: activeWorkflowId,
- userId,
- })
- workflowStore.addEdge(edgeSnapshot)
+ logger.debug('Undid batch-remove-edges', { edgeCount: edgesToAdd.length })
break
}
- case 'move-block': {
- const moveOp = entry.inverse as MoveBlockOperation
+ case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: {
+ const batchMoveOp = entry.inverse as BatchMoveBlocksOperation
const currentBlocks = useWorkflowStore.getState().blocks
- if (currentBlocks[moveOp.data.blockId]) {
- // Apply the inverse's target as the undo result (inverse.after)
+ const positionUpdates: Array<{ id: string; position: { x: number; y: number } }> = []
+
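+ // Apply each inverse move's target position locally, collecting updates for a single server batch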
+ for (const move of batchMoveOp.data.moves) {
+ if (currentBlocks[move.blockId]) {
+ positionUpdates.push({
+ id: move.blockId,
+ position: { x: move.after.x, y: move.after.y },
+ })
+ workflowStore.updateBlockPosition(move.blockId, {
+ x: move.after.x,
+ y: move.after.y,
+ })
+ }
+ }
+
+ if (positionUpdates.length > 0) {
addToQueue({
id: opId,
operation: {
- operation: 'update-position',
- target: 'block',
- payload: {
- id: moveOp.data.blockId,
- position: { x: moveOp.data.after.x, y: moveOp.data.after.y },
- parentId: moveOp.data.after.parentId,
- commit: true,
- isUndo: true,
- originalOpId: entry.id,
- },
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { updates: positionUpdates },
},
workflowId: activeWorkflowId,
userId,
})
- // Use the store from the hook context for React re-renders
- workflowStore.updateBlockPosition(moveOp.data.blockId, {
- x: moveOp.data.after.x,
- y: moveOp.data.after.y,
- })
- if (moveOp.data.after.parentId !== moveOp.data.before.parentId) {
- workflowStore.updateParentId(
- moveOp.data.blockId,
- moveOp.data.after.parentId || '',
- 'parent'
- )
- }
- } else {
- logger.debug('Undo move-block skipped; block missing', {
- blockId: moveOp.data.blockId,
- })
}
break
}
- case 'update-parent': {
- // Undo parent update means reverting to the old parent and position
+ case UNDO_REDO_OPERATIONS.UPDATE_PARENT: {
const updateOp = entry.inverse as UpdateParentOperation
const { blockId, newParentId, newPosition, affectedEdges } = updateOp.data
if (workflowStore.blocks[blockId]) {
- // If we're moving back INTO a subflow, restore edges first
if (newParentId && affectedEdges && affectedEdges.length > 0) {
- affectedEdges.forEach((edge) => {
- if (!workflowStore.edges.find((e) => e.id === edge.id)) {
- workflowStore.addEdge(edge)
- addToQueue({
- id: crypto.randomUUID(),
- operation: {
- operation: 'add',
- target: 'edge',
- payload: { ...edge, isUndo: true },
- },
- workflowId: activeWorkflowId,
- userId,
- })
- }
- })
+ const edgesToAdd = affectedEdges.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+ if (edgesToAdd.length > 0) {
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
+ }
}
- // Send position update to server
addToQueue({
id: crypto.randomUUID(),
operation: {
- operation: 'update-position',
- target: 'block',
+ operation: BLOCK_OPERATIONS.UPDATE_POSITION,
+ target: OPERATION_TARGETS.BLOCK,
payload: {
id: blockId,
position: newPosition,
@@ -562,8 +682,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'update-parent',
- target: 'block',
+ operation: BLOCK_OPERATIONS.UPDATE_PARENT,
+ target: OPERATION_TARGETS.BLOCK,
payload: {
id: blockId,
parentId: newParentId || '',
@@ -587,8 +707,8 @@ export function useUndoRedo() {
addToQueue({
id: crypto.randomUUID(),
operation: {
- operation: 'remove',
- target: 'edge',
+ operation: EDGE_OPERATIONS.REMOVE,
+ target: OPERATION_TARGETS.EDGE,
payload: { id: edge.id, isUndo: true },
},
workflowId: activeWorkflowId,
@@ -602,8 +722,142 @@ export function useUndoRedo() {
}
break
}
- case 'apply-diff': {
- // Undo apply-diff means clearing the diff and restoring baseline
+ case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: {
+ const batchUpdateOp = entry.inverse as BatchUpdateParentOperation
+ const { updates } = batchUpdateOp.data
+
+ const validUpdates = updates.filter((u) => workflowStore.blocks[u.blockId])
+ if (validUpdates.length === 0) {
+ logger.debug('Undo batch-update-parent skipped; no blocks exist')
+ break
+ }
+
+ // Process each update
+ for (const update of validUpdates) {
+ const { blockId, newParentId, newPosition, affectedEdges } = update
+
+ // Moving OUT of subflow (undoing insert) → restore edges first
+ if (!newParentId && affectedEdges && affectedEdges.length > 0) {
+ const edgesToAdd = affectedEdges.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+ if (edgesToAdd.length > 0) {
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
+ }
+ }
+
+ // Moving INTO subflow (undoing removal) → remove edges first
+ if (newParentId && affectedEdges && affectedEdges.length > 0) {
+ affectedEdges.forEach((edge) => {
+ if (workflowStore.edges.find((e) => e.id === edge.id)) {
+ workflowStore.removeEdge(edge.id)
+ }
+ })
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { ids: affectedEdges.map((e) => e.id) },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ }
+
+ // Update position and parent locally
+ workflowStore.updateBlockPosition(blockId, newPosition)
+ workflowStore.updateParentId(blockId, newParentId || '', 'parent')
+ }
+
+ // Send batch update to server
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: {
+ updates: validUpdates.map((u) => ({
+ id: u.blockId,
+ parentId: u.newParentId || '',
+ position: u.newPosition,
+ })),
+ },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ logger.debug('Undid batch-update-parent', { updateCount: validUpdates.length })
+ break
+ }
+ case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED: {
+ const toggleOp = entry.inverse as BatchToggleEnabledOperation
+ const { blockIds, previousStates } = toggleOp.data
+
+ const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
+ if (validBlockIds.length === 0) {
+ logger.debug('Undo batch-toggle-enabled skipped; no blocks exist')
+ break
+ }
+
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validBlockIds, previousStates },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ // Use setBlockEnabled to directly restore to previous state
+ // This is more robust than conditional toggle in collaborative scenarios
+ validBlockIds.forEach((blockId) => {
+ workflowStore.setBlockEnabled(blockId, previousStates[blockId])
+ })
+ break
+ }
+ case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES: {
+ const toggleOp = entry.inverse as BatchToggleHandlesOperation
+ const { blockIds, previousStates } = toggleOp.data
+
+ const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
+ if (validBlockIds.length === 0) {
+ logger.debug('Undo batch-toggle-handles skipped; no blocks exist')
+ break
+ }
+
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validBlockIds, previousStates },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ // Use setBlockHandles to directly restore to previous state
+ // This is more robust than conditional toggle in collaborative scenarios
+ validBlockIds.forEach((blockId) => {
+ workflowStore.setBlockHandles(blockId, previousStates[blockId])
+ })
+ break
+ }
+ case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
const applyDiffInverse = entry.inverse as any
const { baselineSnapshot } = applyDiffInverse.data
@@ -662,12 +916,11 @@ export function useUndoRedo() {
logger.info('Undid apply-diff operation successfully')
break
}
- case 'accept-diff': {
+ case UNDO_REDO_OPERATIONS.ACCEPT_DIFF: {
// Undo accept-diff means restoring diff view with markers
const acceptDiffInverse = entry.inverse as any
const acceptDiffOp = entry.operation as any
const { beforeAccept, diffAnalysis } = acceptDiffInverse.data
- const { baselineSnapshot } = acceptDiffOp.data
const { useWorkflowDiffStore } = await import('@/stores/workflow-diff/store')
const diffStore = useWorkflowDiffStore.getState()
@@ -722,10 +975,9 @@ export function useUndoRedo() {
logger.info('Undid accept-diff operation - restored diff view')
break
}
- case 'reject-diff': {
+ case UNDO_REDO_OPERATIONS.REJECT_DIFF: {
// Undo reject-diff means restoring diff view with markers
const rejectDiffInverse = entry.inverse as any
- const rejectDiffOp = entry.operation as any
const { beforeReject, diffAnalysis, baselineSnapshot } = rejectDiffInverse.data
const { useWorkflowDiffStore } = await import('@/stores/workflow-diff/store')
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
@@ -793,7 +1045,7 @@ export function useUndoRedo() {
const opId = crypto.randomUUID()
switch (entry.operation.type) {
- case 'batch-add-blocks': {
+ case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: {
const batchOp = entry.operation as BatchAddBlocksOperation
const { blockSnapshots, edgeSnapshots, subBlockValues } = batchOp.data
@@ -806,8 +1058,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'batch-add-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: {
blocks: blocksToAdd,
edges: edgeSnapshots || [],
@@ -860,7 +1112,7 @@ export function useUndoRedo() {
}
break
}
- case 'batch-remove-blocks': {
+ case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: {
const batchOp = entry.operation as BatchRemoveBlocksOperation
const { blockSnapshots } = batchOp.data
const blockIds = blockSnapshots.map((b) => b.id)
@@ -874,8 +1126,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'batch-remove-blocks',
- target: 'blocks',
+ operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS,
+ target: OPERATION_TARGETS.BLOCKS,
payload: { ids: existingBlockIds },
},
workflowId: activeWorkflowId,
@@ -885,89 +1137,91 @@ export function useUndoRedo() {
existingBlockIds.forEach((id) => workflowStore.removeBlock(id))
break
}
- case 'add-edge': {
- // Use snapshot from inverse
- const inv = entry.inverse as RemoveEdgeOperation
- const snap = inv.data.edgeSnapshot
- if (!snap || workflowStore.edges.find((e) => e.id === snap.id)) {
- logger.debug('Redo add-edge skipped', { hasSnapshot: Boolean(snap) })
- break
+ case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: {
+ // Redo batch-remove-edges: remove all edges again
+ const batchRemoveOp = entry.operation as BatchRemoveEdgesOperation
+ const { edgeSnapshots } = batchRemoveOp.data
+
+ const edgesToRemove = edgeSnapshots
+ .filter((e) => workflowStore.edges.find((edge) => edge.id === e.id))
+ .map((e) => e.id)
+
+ if (edgesToRemove.length > 0) {
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { ids: edgesToRemove },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ edgesToRemove.forEach((id) => workflowStore.removeEdge(id))
}
- addToQueue({
- id: opId,
- operation: {
- operation: 'add',
- target: 'edge',
- payload: { ...snap, isRedo: true, originalOpId: entry.id },
- },
- workflowId: activeWorkflowId,
- userId,
- })
- workflowStore.addEdge(snap)
+
+ logger.debug('Redid batch-remove-edges', { edgeCount: edgesToRemove.length })
break
}
- case 'remove-edge': {
- const { edgeId } = entry.operation.data
- if (workflowStore.edges.find((e) => e.id === edgeId)) {
+ case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: {
+ // Redo batch-add-edges: add all edges again
+ const batchAddOp = entry.operation as BatchAddEdgesOperation
+ const { edgeSnapshots } = batchAddOp.data
+
+ const edgesToAdd = edgeSnapshots.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+
+ if (edgesToAdd.length > 0) {
addToQueue({
id: opId,
operation: {
- operation: 'remove',
- target: 'edge',
- payload: { id: edgeId, isRedo: true, originalOpId: entry.id },
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
},
workflowId: activeWorkflowId,
userId,
})
- workflowStore.removeEdge(edgeId)
- } else {
- logger.debug('Redo remove-edge skipped; edge missing', {
- edgeId,
- })
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
}
+
+ logger.debug('Redid batch-add-edges', { edgeCount: edgesToAdd.length })
break
}
- case 'move-block': {
- const moveOp = entry.operation as MoveBlockOperation
+ case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: {
+ const batchMoveOp = entry.operation as BatchMoveBlocksOperation
const currentBlocks = useWorkflowStore.getState().blocks
- if (currentBlocks[moveOp.data.blockId]) {
+ const positionUpdates: Array<{ id: string; position: { x: number; y: number } }> = []
+
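+ // Re-apply each recorded target position locally, collecting updates for a single server batch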
+ for (const move of batchMoveOp.data.moves) {
+ if (currentBlocks[move.blockId]) {
+ positionUpdates.push({
+ id: move.blockId,
+ position: { x: move.after.x, y: move.after.y },
+ })
+ workflowStore.updateBlockPosition(move.blockId, {
+ x: move.after.x,
+ y: move.after.y,
+ })
+ }
+ }
+
+ if (positionUpdates.length > 0) {
addToQueue({
id: opId,
operation: {
- operation: 'update-position',
- target: 'block',
- payload: {
- id: moveOp.data.blockId,
- position: { x: moveOp.data.after.x, y: moveOp.data.after.y },
- parentId: moveOp.data.after.parentId,
- commit: true,
- isRedo: true,
- originalOpId: entry.id,
- },
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { updates: positionUpdates },
},
workflowId: activeWorkflowId,
userId,
})
- // Use the store from the hook context for React re-renders
- workflowStore.updateBlockPosition(moveOp.data.blockId, {
- x: moveOp.data.after.x,
- y: moveOp.data.after.y,
- })
- if (moveOp.data.after.parentId !== moveOp.data.before.parentId) {
- workflowStore.updateParentId(
- moveOp.data.blockId,
- moveOp.data.after.parentId || '',
- 'parent'
- )
- }
- } else {
- logger.debug('Redo move-block skipped; block missing', {
- blockId: moveOp.data.blockId,
- })
}
break
}
- case 'update-parent': {
+ case UNDO_REDO_OPERATIONS.UPDATE_PARENT: {
// Redo parent update means applying the new parent and position
const updateOp = entry.operation as UpdateParentOperation
const { blockId, newParentId, newPosition, affectedEdges } = updateOp.data
@@ -981,8 +1235,8 @@ export function useUndoRedo() {
addToQueue({
id: crypto.randomUUID(),
operation: {
- operation: 'remove',
- target: 'edge',
+ operation: EDGE_OPERATIONS.REMOVE,
+ target: OPERATION_TARGETS.EDGE,
payload: { id: edge.id, isRedo: true },
},
workflowId: activeWorkflowId,
@@ -996,8 +1250,8 @@ export function useUndoRedo() {
addToQueue({
id: crypto.randomUUID(),
operation: {
- operation: 'update-position',
- target: 'block',
+ operation: BLOCK_OPERATIONS.UPDATE_POSITION,
+ target: OPERATION_TARGETS.BLOCK,
payload: {
id: blockId,
position: newPosition,
@@ -1017,8 +1271,8 @@ export function useUndoRedo() {
addToQueue({
id: opId,
operation: {
- operation: 'update-parent',
- target: 'block',
+ operation: BLOCK_OPERATIONS.UPDATE_PARENT,
+ target: OPERATION_TARGETS.BLOCK,
payload: {
id: blockId,
parentId: newParentId || '',
@@ -1035,29 +1289,165 @@ export function useUndoRedo() {
workflowStore.updateParentId(blockId, newParentId || '', 'parent')
// If we're adding TO a subflow, restore edges after
+ if (newParentId && affectedEdges && affectedEdges.length > 0) {
+ const edgesToAdd = affectedEdges.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+ if (edgesToAdd.length > 0) {
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
+ }
+ }
+ } else {
+ logger.debug('Redo update-parent skipped; block missing', { blockId })
+ }
+ break
+ }
+ case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: {
+ const batchUpdateOp = entry.operation as BatchUpdateParentOperation
+ const { updates } = batchUpdateOp.data
+
+ const validUpdates = updates.filter((u) => workflowStore.blocks[u.blockId])
+ if (validUpdates.length === 0) {
+ logger.debug('Redo batch-update-parent skipped; no blocks exist')
+ break
+ }
+
+ // Process each update
+ for (const update of validUpdates) {
+ const { blockId, newParentId, newPosition, affectedEdges } = update
+
+ // Moving INTO subflow (redoing insert) → remove edges first
if (newParentId && affectedEdges && affectedEdges.length > 0) {
affectedEdges.forEach((edge) => {
- if (!workflowStore.edges.find((e) => e.id === edge.id)) {
- workflowStore.addEdge(edge)
- addToQueue({
- id: crypto.randomUUID(),
- operation: {
- operation: 'add',
- target: 'edge',
- payload: { ...edge, isRedo: true },
- },
- workflowId: activeWorkflowId,
- userId,
- })
+ if (workflowStore.edges.find((e) => e.id === edge.id)) {
+ workflowStore.removeEdge(edge.id)
}
})
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { ids: affectedEdges.map((e) => e.id) },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ }
+
+ // Update position and parent locally
+ workflowStore.updateBlockPosition(blockId, newPosition)
+ workflowStore.updateParentId(blockId, newParentId || '', 'parent')
+
+ // Moving OUT of subflow (redoing removal) → restore edges after
+ if (!newParentId && affectedEdges && affectedEdges.length > 0) {
+ const edgesToAdd = affectedEdges.filter(
+ (e) => !workflowStore.edges.find((edge) => edge.id === e.id)
+ )
+ if (edgesToAdd.length > 0) {
+ addToQueue({
+ id: crypto.randomUUID(),
+ operation: {
+ operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
+ target: OPERATION_TARGETS.EDGES,
+ payload: { edges: edgesToAdd },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+ edgesToAdd.forEach((edge) => workflowStore.addEdge(edge))
+ }
}
- } else {
- logger.debug('Redo update-parent skipped; block missing', { blockId })
}
+
+ // Send batch update to server
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: {
+ updates: validUpdates.map((u) => ({
+ id: u.blockId,
+ parentId: u.newParentId || '',
+ position: u.newPosition,
+ })),
+ },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ logger.debug('Redid batch-update-parent', { updateCount: validUpdates.length })
+ break
+ }
+ case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED: {
+ const toggleOp = entry.operation as BatchToggleEnabledOperation
+ const { blockIds, previousStates } = toggleOp.data
+
+ const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
+ if (validBlockIds.length === 0) {
+ logger.debug('Redo batch-toggle-enabled skipped; no blocks exist')
+ break
+ }
+
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validBlockIds, previousStates },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ // Use setBlockEnabled to directly set to toggled state
+ // Redo sets to !previousStates (the state after the original toggle)
+ validBlockIds.forEach((blockId) => {
+ workflowStore.setBlockEnabled(blockId, !previousStates[blockId])
+ })
+ break
+ }
+ case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES: {
+ const toggleOp = entry.operation as BatchToggleHandlesOperation
+ const { blockIds, previousStates } = toggleOp.data
+
+ const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
+ if (validBlockIds.length === 0) {
+ logger.debug('Redo batch-toggle-handles skipped; no blocks exist')
+ break
+ }
+
+ addToQueue({
+ id: opId,
+ operation: {
+ operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES,
+ target: OPERATION_TARGETS.BLOCKS,
+ payload: { blockIds: validBlockIds, previousStates },
+ },
+ workflowId: activeWorkflowId,
+ userId,
+ })
+
+ // Use setBlockHandles to directly set to toggled state
+ // Redo sets to !previousStates (the state after the original toggle)
+ validBlockIds.forEach((blockId) => {
+ workflowStore.setBlockHandles(blockId, !previousStates[blockId])
+ })
break
}
- case 'apply-diff': {
+ case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
// Redo apply-diff means re-applying the proposed state with diff markers
const applyDiffOp = entry.operation as any
const { proposedState, diffAnalysis, baselineSnapshot } = applyDiffOp.data
@@ -1114,7 +1504,7 @@ export function useUndoRedo() {
logger.info('Redid apply-diff operation')
break
}
- case 'accept-diff': {
+ case UNDO_REDO_OPERATIONS.ACCEPT_DIFF: {
// Redo accept-diff means re-accepting (stripping markers)
const acceptDiffOp = entry.operation as any
const { afterAccept } = acceptDiffOp.data
@@ -1168,7 +1558,7 @@ export function useUndoRedo() {
logger.info('Redid accept-diff operation - cleared diff view')
break
}
- case 'reject-diff': {
+ case UNDO_REDO_OPERATIONS.REJECT_DIFF: {
// Redo reject-diff means re-rejecting (restoring baseline, clearing diff)
const rejectDiffOp = entry.operation as any
const { afterReject } = rejectDiffOp.data
@@ -1246,7 +1636,7 @@ export function useUndoRedo() {
const operation: any = {
id: crypto.randomUUID(),
- type: 'apply-diff',
+ type: UNDO_REDO_OPERATIONS.APPLY_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1259,7 +1649,7 @@ export function useUndoRedo() {
const inverse: any = {
id: crypto.randomUUID(),
- type: 'apply-diff',
+ type: UNDO_REDO_OPERATIONS.APPLY_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1290,7 +1680,7 @@ export function useUndoRedo() {
const operation: any = {
id: crypto.randomUUID(),
- type: 'accept-diff',
+ type: UNDO_REDO_OPERATIONS.ACCEPT_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1304,7 +1694,7 @@ export function useUndoRedo() {
const inverse: any = {
id: crypto.randomUUID(),
- type: 'accept-diff',
+ type: UNDO_REDO_OPERATIONS.ACCEPT_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1330,7 +1720,7 @@ export function useUndoRedo() {
const operation: any = {
id: crypto.randomUUID(),
- type: 'reject-diff',
+ type: UNDO_REDO_OPERATIONS.REJECT_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1344,7 +1734,7 @@ export function useUndoRedo() {
const inverse: any = {
id: crypto.randomUUID(),
- type: 'reject-diff',
+ type: UNDO_REDO_OPERATIONS.REJECT_DIFF,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
@@ -1372,9 +1762,12 @@ export function useUndoRedo() {
recordBatchAddBlocks,
recordBatchRemoveBlocks,
recordAddEdge,
- recordRemoveEdge,
- recordMove,
+ recordBatchRemoveEdges,
+ recordBatchMoveBlocks,
recordUpdateParent,
+ recordBatchUpdateParent,
+ recordBatchToggleEnabled,
+ recordBatchToggleHandles,
recordApplyDiff,
recordAcceptDiff,
recordRejectDiff,
diff --git a/apps/sim/hooks/use-webhook-management.ts b/apps/sim/hooks/use-webhook-management.ts
index e71a0cedb3..3df45eee07 100644
--- a/apps/sim/hooks/use-webhook-management.ts
+++ b/apps/sim/hooks/use-webhook-management.ts
@@ -302,7 +302,11 @@ export function useWebhookManagement({
effectiveTriggerId: string | undefined,
selectedCredentialId: string | null
): Promise<void> => {
- const triggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
+ const triggerConfigRaw = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
+ const triggerConfig =
+ typeof triggerConfigRaw === 'object' && triggerConfigRaw !== null
+ ? (triggerConfigRaw as Record<string, unknown>)
+ : {}
const isCredentialSet = selectedCredentialId?.startsWith(CREDENTIAL_SET_PREFIX)
const credentialSetId = isCredentialSet
diff --git a/apps/sim/lib/logs/execution/logging-factory.ts b/apps/sim/lib/logs/execution/logging-factory.ts
index 5d5e5f8eb3..be7e2d5fc5 100644
--- a/apps/sim/lib/logs/execution/logging-factory.ts
+++ b/apps/sim/lib/logs/execution/logging-factory.ts
@@ -1,3 +1,5 @@
+import { db, workflow } from '@sim/db'
+import { eq } from 'drizzle-orm'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import type { ExecutionEnvironment, ExecutionTrigger, WorkflowState } from '@/lib/logs/types'
import {
@@ -34,7 +36,15 @@ export function createEnvironmentObject(
}
export async function loadWorkflowStateForExecution(workflowId: string): Promise<WorkflowState> {
- const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
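+ // Load the workflow's variables in parallel with the normalized tables so execution state includes them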
+ const [normalizedData, workflowRecord] = await Promise.all([
+ loadWorkflowFromNormalizedTables(workflowId),
+ db
+ .select({ variables: workflow.variables })
+ .from(workflow)
+ .where(eq(workflow.id, workflowId))
+ .limit(1)
+ .then((rows) => rows[0]),
+ ])
if (!normalizedData) {
throw new Error(
@@ -47,6 +57,7 @@ export async function loadWorkflowStateForExecution(workflowId: string): Promise<WorkflowState>
edges: normalizedData.edges || [],
loops: normalizedData.loops || {},
parallels: normalizedData.parallels || {},
+ variables: (workflowRecord?.variables as WorkflowState['variables']) || undefined,
}
}
@@ -65,6 +76,7 @@ export async function loadDeployedWorkflowStateForLogging(
edges: deployedData.edges || [],
loops: deployedData.loops || {},
parallels: deployedData.parallels || {},
+ variables: deployedData.variables as WorkflowState['variables'],
}
}
diff --git a/apps/sim/lib/logs/execution/snapshot/service.test.ts b/apps/sim/lib/logs/execution/snapshot/service.test.ts
index 091bdb4a1b..543a2b1a16 100644
--- a/apps/sim/lib/logs/execution/snapshot/service.test.ts
+++ b/apps/sim/lib/logs/execution/snapshot/service.test.ts
@@ -105,7 +105,7 @@ describe('SnapshotService', () => {
block1: {
...baseState.blocks.block1,
// Different block state - we can change outputs to make it different
- outputs: { response: { content: 'different result' } as Record<string, any> },
+ outputs: { response: { type: 'string', description: 'different result' } },
},
},
}
@@ -177,7 +177,7 @@ describe('SnapshotService', () => {
},
},
outputs: {
- response: { content: 'Agent response' } as Record<string, any>,
+ response: { type: 'string', description: 'Agent response' },
},
enabled: true,
horizontalHandles: true,
@@ -211,5 +211,113 @@ describe('SnapshotService', () => {
const hash2 = service.computeStateHash(complexState)
expect(hash).toBe(hash2)
})
+
+ test('should include variables in hash computation', () => {
+ const stateWithVariables: WorkflowState = {
+ blocks: {},
+ edges: [],
+ loops: {},
+ parallels: {},
+ variables: {
+ 'var-1': {
+ id: 'var-1',
+ name: 'apiKey',
+ type: 'string',
+ value: 'secret123',
+ },
+ },
+ }
+
+ const stateWithoutVariables: WorkflowState = {
+ blocks: {},
+ edges: [],
+ loops: {},
+ parallels: {},
+ }
+
+ const hashWith = service.computeStateHash(stateWithVariables)
+ const hashWithout = service.computeStateHash(stateWithoutVariables)
+
+ expect(hashWith).not.toBe(hashWithout)
+ })
+
+ test('should detect changes in variable values', () => {
+ const state1: WorkflowState = {
+ blocks: {},
+ edges: [],
+ loops: {},
+ parallels: {},
+ variables: {
+ 'var-1': {
+ id: 'var-1',
+ name: 'myVar',
+ type: 'string',
+ value: 'value1',
+ },
+ },
+ }
+
+ const state2: WorkflowState = {
+ blocks: {},
+ edges: [],
+ loops: {},
+ parallels: {},
+ variables: {
+ 'var-1': {
+ id: 'var-1',
+ name: 'myVar',
+ type: 'string',
+ value: 'value2', // Different value
+ },
+ },
+ }
+
+ const hash1 = service.computeStateHash(state1)
+ const hash2 = service.computeStateHash(state2)
+
+ expect(hash1).not.toBe(hash2)
+ })
+
+ test('should generate consistent hashes for states with variables', () => {
+ const stateWithVariables: WorkflowState = {
+ blocks: {
+ block1: {
+ id: 'block1',
+ name: 'Test',
+ type: 'agent',
+ position: { x: 0, y: 0 },
+ subBlocks: {},
+ outputs: {},
+ enabled: true,
+ horizontalHandles: true,
+ advancedMode: false,
+ height: 0,
+ },
+ },
+ edges: [],
+ loops: {},
+ parallels: {},
+ variables: {
+ 'var-1': {
+ id: 'var-1',
+ name: 'testVar',
+ type: 'plain',
+ value: 'testValue',
+ },
+ 'var-2': {
+ id: 'var-2',
+ name: 'anotherVar',
+ type: 'number',
+ value: 42,
+ },
+ },
+ }
+
+ const hash1 = service.computeStateHash(stateWithVariables)
+ const hash2 = service.computeStateHash(stateWithVariables)
+
+ expect(hash1).toBe(hash2)
+ expect(hash1).toHaveLength(64)
+ })
})
})
diff --git a/apps/sim/lib/logs/execution/snapshot/service.ts b/apps/sim/lib/logs/execution/snapshot/service.ts
index b28e94e529..d753cbbd87 100644
--- a/apps/sim/lib/logs/execution/snapshot/service.ts
+++ b/apps/sim/lib/logs/execution/snapshot/service.ts
@@ -182,11 +182,15 @@ export class SnapshotService implements ISnapshotService {
normalizedParallels[parallelId] = normalizeValue(parallel)
}
+ // 4. Normalize variables (if present)
+ const normalizedVariables = state.variables ? normalizeValue(state.variables) : undefined
+
return {
blocks: normalizedBlocks,
edges: normalizedEdges,
loops: normalizedLoops,
parallels: normalizedParallels,
+ ...(normalizedVariables !== undefined && { variables: normalizedVariables }),
}
}
}
diff --git a/apps/sim/lib/mcp/workflow-mcp-sync.ts b/apps/sim/lib/mcp/workflow-mcp-sync.ts
index c6055a713b..447eeefc6f 100644
--- a/apps/sim/lib/mcp/workflow-mcp-sync.ts
+++ b/apps/sim/lib/mcp/workflow-mcp-sync.ts
@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
+import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { extractInputFormatFromBlocks, generateToolInputSchema } from './workflow-tool-schema'
const logger = createLogger('WorkflowMcpSync')
@@ -59,7 +60,7 @@ export async function syncMcpToolsForWorkflow(options: SyncOptions): Promise
}),
}))
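+// Stub DNS MX lookups so email-domain validation tests never hit the network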
+vi.mock('dns', () => ({
+ resolveMx: (
+ _domain: string,
+ callback: (err: Error | null, addresses: { exchange: string; priority: number }[]) => void
+ ) => {
+ callback(null, [{ exchange: 'mail.example.com', priority: 10 }])
+ },
+}))
+
describe('Email Validation', () => {
describe('validateEmail', () => {
it.concurrent('should validate a correct email', async () => {
diff --git a/apps/sim/lib/workflows/autolayout/types.ts b/apps/sim/lib/workflows/autolayout/types.ts
index a20c35715a..7f8cf78190 100644
--- a/apps/sim/lib/workflows/autolayout/types.ts
+++ b/apps/sim/lib/workflows/autolayout/types.ts
@@ -1,5 +1,8 @@
import type { BlockState, Position } from '@/stores/workflows/workflow/types'
+export type { Edge } from 'reactflow'
+export type { Loop, Parallel } from '@/stores/workflows/workflow/types'
+
export interface LayoutOptions {
horizontalSpacing?: number
verticalSpacing?: number
@@ -12,30 +15,6 @@ export interface LayoutResult {
error?: string
}
-export interface Edge {
- id: string
- source: string
- target: string
- sourceHandle?: string | null
- targetHandle?: string | null
-}
-
-export interface Loop {
- id: string
- nodes: string[]
- iterations: number
- loopType: 'for' | 'forEach' | 'while' | 'doWhile'
- forEachItems?: any[] | Record<string, any> | string // Items or expression
- whileCondition?: string // JS expression that evaluates to boolean
-}
-
-export interface Parallel {
- id: string
- nodes: string[]
- count?: number
- parallelType?: 'count' | 'collection'
-}
-
export interface BlockMetrics {
width: number
height: number
diff --git a/apps/sim/lib/workflows/blocks/block-outputs.ts b/apps/sim/lib/workflows/blocks/block-outputs.ts
index 6ea6eef84f..2fabf9692f 100644
--- a/apps/sim/lib/workflows/blocks/block-outputs.ts
+++ b/apps/sim/lib/workflows/blocks/block-outputs.ts
@@ -11,10 +11,23 @@ import {
USER_FILE_PROPERTY_TYPES,
} from '@/lib/workflows/types'
import { getBlock } from '@/blocks'
-import type { BlockConfig, OutputCondition } from '@/blocks/types'
+import type { BlockConfig, OutputCondition, OutputFieldDefinition } from '@/blocks/types'
import { getTrigger, isTriggerValid } from '@/triggers'
-type OutputDefinition = Record<string, any>
+type OutputDefinition = Record<string, OutputFieldDefinition>
+
+interface SubBlockWithValue {
+ value?: unknown
+}
+
+type ConditionValue = string | number | boolean
+
+/**
+ * Checks if a value is a valid primitive for condition comparison.
+ */
+function isConditionPrimitive(value: unknown): value is ConditionValue {
+ return typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean'
+}
/**
* Evaluates an output condition against subBlock values.
@@ -22,7 +35,7 @@ type OutputDefinition = Record<string, any>
*/
function evaluateOutputCondition(
condition: OutputCondition,
- subBlocks: Record<string, any> | undefined
+ subBlocks: Record<string, SubBlockWithValue> | undefined
): boolean {
if (!subBlocks) return false
@@ -30,7 +43,8 @@ function evaluateOutputCondition(
let matches: boolean
if (Array.isArray(condition.value)) {
- matches = condition.value.includes(fieldValue)
+ // For array conditions, check if fieldValue is a valid primitive and included
+ matches = isConditionPrimitive(fieldValue) && condition.value.includes(fieldValue)
} else {
matches = fieldValue === condition.value
}
@@ -44,7 +58,8 @@ function evaluateOutputCondition(
let andMatches: boolean
if (Array.isArray(condition.and.value)) {
- andMatches = condition.and.value.includes(andFieldValue)
+ andMatches =
+ isConditionPrimitive(andFieldValue) && condition.and.value.includes(andFieldValue)
} else {
andMatches = andFieldValue === condition.and.value
}
@@ -65,7 +80,7 @@ function evaluateOutputCondition(
*/
function filterOutputsByCondition(
outputs: OutputDefinition,
- subBlocks: Record<string, any> | undefined
+ subBlocks: Record<string, SubBlockWithValue> | undefined
): OutputDefinition {
const filtered: OutputDefinition = {}
@@ -119,7 +134,7 @@ function hasInputFormat(blockConfig: BlockConfig): boolean {
}
function getTriggerId(
- subBlocks: Record<string, any> | undefined,
+ subBlocks: Record<string, SubBlockWithValue> | undefined,
blockConfig: BlockConfig
): string | undefined {
const selectedTriggerIdValue = subBlocks?.selectedTriggerId?.value
@@ -136,13 +151,17 @@ function getTriggerId(
)
}
-function getUnifiedStartOutputs(subBlocks: Record<string, any> | undefined): OutputDefinition {
+function getUnifiedStartOutputs(
+ subBlocks: Record<string, SubBlockWithValue> | undefined
+): OutputDefinition {
const outputs = { ...UNIFIED_START_OUTPUTS }
const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value)
return applyInputFormatFields(normalizedInputFormat, outputs)
}
-function getLegacyStarterOutputs(subBlocks: Record<string, any> | undefined): OutputDefinition {
+function getLegacyStarterOutputs(
+ subBlocks: Record<string, SubBlockWithValue> | undefined
+): OutputDefinition {
const startWorkflowValue = subBlocks?.startWorkflow?.value
if (startWorkflowValue === 'chat') {
@@ -179,7 +198,7 @@ function shouldClearBaseOutputs(
function applyInputFormatToOutputs(
blockType: string,
blockConfig: BlockConfig,
- subBlocks: Record<string, any> | undefined,
+ subBlocks: Record<string, SubBlockWithValue> | undefined,
baseOutputs: OutputDefinition
): OutputDefinition {
if (!hasInputFormat(blockConfig) || !subBlocks?.inputFormat?.value) {
@@ -203,7 +222,7 @@ function applyInputFormatToOutputs(
export function getBlockOutputs(
blockType: string,
- subBlocks?: Record<string, any>,
+ subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): OutputDefinition {
const blockConfig = getBlock(blockType)
@@ -214,7 +233,8 @@ export function getBlockOutputs(
if (triggerId && isTriggerValid(triggerId)) {
const trigger = getTrigger(triggerId)
if (trigger.outputs) {
- return trigger.outputs
+ // TriggerOutput is compatible with OutputFieldDefinition at runtime
+ return trigger.outputs as OutputDefinition
}
}
}
@@ -226,7 +246,7 @@ export function getBlockOutputs(
}
if (blockType === 'human_in_the_loop') {
- const hitlOutputs: Record<string, any> = {
+ const hitlOutputs: OutputDefinition = {
url: { type: 'string', description: 'Resume UI URL' },
resumeEndpoint: {
type: 'string',
@@ -251,7 +271,7 @@ export function getBlockOutputs(
if (blockType === 'approval') {
// Start with only url (apiUrl commented out - not accessible as output)
- const pauseResumeOutputs: Record<string, any> = {
+ const pauseResumeOutputs: OutputDefinition = {
url: { type: 'string', description: 'Resume UI URL' },
// apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
}
@@ -285,7 +305,7 @@ function shouldFilterReservedField(
blockType: string,
key: string,
prefix: string,
- subBlocks: Record<string, any> | undefined
+ subBlocks: Record<string, SubBlockWithValue> | undefined
): boolean {
if (blockType !== TRIGGER_TYPES.START || prefix) {
return false
@@ -308,7 +328,7 @@ function expandFileTypeProperties(path: string): string[] {
function collectOutputPaths(
obj: OutputDefinition,
blockType: string,
- subBlocks: Record<string, any> | undefined,
+ subBlocks: Record<string, SubBlockWithValue> | undefined,
prefix = ''
): string[] {
const paths: string[] = []
@@ -321,13 +341,14 @@ function collectOutputPaths(
}
if (value && typeof value === 'object' && 'type' in value) {
- if (value.type === 'files') {
+ const typedValue = value as { type: unknown }
+ if (typedValue.type === 'files') {
paths.push(...expandFileTypeProperties(path))
} else {
paths.push(path)
}
} else if (value && typeof value === 'object' && !Array.isArray(value)) {
- paths.push(...collectOutputPaths(value, blockType, subBlocks, path))
+ paths.push(...collectOutputPaths(value as OutputDefinition, blockType, subBlocks, path))
} else {
paths.push(path)
}
@@ -338,7 +359,7 @@ function collectOutputPaths(
export function getBlockOutputPaths(
blockType: string,
- subBlocks?: Record<string, any>,
+ subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): string[] {
const outputs = getBlockOutputs(blockType, subBlocks, triggerMode)
@@ -351,39 +372,45 @@ function getFilePropertyType(outputs: OutputDefinition, pathParts: string[]): st
return null
}
- let current: any = outputs
+ let current: unknown = outputs
for (const part of pathParts.slice(0, -1)) {
if (!current || typeof current !== 'object') {
return null
}
- current = current[part]
+ current = (current as Record<string, unknown>)[part]
}
- if (current && typeof current === 'object' && 'type' in current && current.type === 'files') {
+ if (
+ current &&
+ typeof current === 'object' &&
+ 'type' in current &&
+ (current as { type: unknown }).type === 'files'
+ ) {
return USER_FILE_PROPERTY_TYPES[lastPart as keyof typeof USER_FILE_PROPERTY_TYPES]
}
return null
}
-function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): any {
- let current: any = outputs
+function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): unknown {
+ let current: unknown = outputs
for (const part of pathParts) {
if (!current || typeof current !== 'object') {
return null
}
- current = current[part]
+ current = (current as Record<string, unknown>)[part]
}
return current
}
-function extractType(value: any): string {
+function extractType(value: unknown): string {
if (!value) return 'any'
if (typeof value === 'object' && 'type' in value) {
- return value.type
+ const typeValue = (value as { type: unknown }).type
+ return typeof typeValue === 'string' ? typeValue : 'any'
}
return typeof value === 'string' ? value : 'any'
@@ -392,7 +419,7 @@ function extractType(value: any): string {
export function getBlockOutputType(
blockType: string,
outputPath: string,
- subBlocks?: Record<string, any>,
+ subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): string {
const outputs = getBlockOutputs(blockType, subBlocks, triggerMode)
diff --git a/apps/sim/lib/workflows/comparison/compare.ts b/apps/sim/lib/workflows/comparison/compare.ts
index a34521e23b..4f038cd8c2 100644
--- a/apps/sim/lib/workflows/comparison/compare.ts
+++ b/apps/sim/lib/workflows/comparison/compare.ts
@@ -51,8 +51,8 @@ export function hasWorkflowChanged(
}
// 3. Build normalized representations of blocks for comparison
- const normalizedCurrentBlocks: Record<string, any> = {}
- const normalizedDeployedBlocks: Record<string, any> = {}
+ const normalizedCurrentBlocks: Record<string, unknown> = {}
+ const normalizedDeployedBlocks: Record<string, unknown> = {}
for (const blockId of currentBlockIds) {
const currentBlock = currentState.blocks[blockId]
@@ -120,8 +120,9 @@ export function hasWorkflowChanged(
}
// Get values with special handling for null/undefined
- let currentValue = currentSubBlocks[subBlockId].value ?? null
- let deployedValue = deployedSubBlocks[subBlockId].value ?? null
+ // Using unknown type since sanitization functions return different types
+ let currentValue: unknown = currentSubBlocks[subBlockId].value ?? null
+ let deployedValue: unknown = deployedSubBlocks[subBlockId].value ?? null
if (subBlockId === 'tools' && Array.isArray(currentValue) && Array.isArray(deployedValue)) {
currentValue = sanitizeTools(currentValue)
@@ -232,8 +233,8 @@ export function hasWorkflowChanged(
}
// 6. Compare variables
- const currentVariables = normalizeVariables((currentState as any).variables)
- const deployedVariables = normalizeVariables((deployedState as any).variables)
+ const currentVariables = normalizeVariables(currentState.variables)
+ const deployedVariables = normalizeVariables(deployedState.variables)
const normalizedCurrentVars = normalizeValue(
Object.fromEntries(Object.entries(currentVariables).map(([id, v]) => [id, sanitizeVariable(v)]))
diff --git a/apps/sim/lib/workflows/comparison/normalize.test.ts b/apps/sim/lib/workflows/comparison/normalize.test.ts
index c144694564..ca22205876 100644
--- a/apps/sim/lib/workflows/comparison/normalize.test.ts
+++ b/apps/sim/lib/workflows/comparison/normalize.test.ts
@@ -2,6 +2,7 @@
* Tests for workflow normalization utilities
*/
import { describe, expect, it } from 'vitest'
+import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
normalizedStringify,
normalizeEdge,
@@ -39,7 +40,7 @@ describe('Workflow Normalization Utilities', () => {
it.concurrent('should sort object keys alphabetically', () => {
const input = { zebra: 1, apple: 2, mango: 3 }
- const result = normalizeValue(input)
+ const result = normalizeValue(input) as Record<string, unknown>
expect(Object.keys(result)).toEqual(['apple', 'mango', 'zebra'])
})
@@ -55,7 +56,10 @@ describe('Workflow Normalization Utilities', () => {
},
first: 'value',
}
- const result = normalizeValue(input)
+ const result = normalizeValue(input) as {
+ first: string
+ outer: { z: number; a: { y: number; b: number } }
+ }
expect(Object.keys(result)).toEqual(['first', 'outer'])
expect(Object.keys(result.outer)).toEqual(['a', 'z'])
@@ -72,11 +76,11 @@ describe('Workflow Normalization Utilities', () => {
it.concurrent('should handle arrays with mixed types', () => {
const input = [1, 'string', { b: 2, a: 1 }, null, [3, 2, 1]]
- const result = normalizeValue(input)
+ const result = normalizeValue(input) as unknown[]
expect(result[0]).toBe(1)
expect(result[1]).toBe('string')
- expect(Object.keys(result[2])).toEqual(['a', 'b'])
+ expect(Object.keys(result[2] as Record<string, unknown>)).toEqual(['a', 'b'])
expect(result[3]).toBe(null)
expect(result[4]).toEqual([3, 2, 1]) // Array order preserved
})
@@ -94,7 +98,9 @@ describe('Workflow Normalization Utilities', () => {
},
},
}
- const result = normalizeValue(input)
+ const result = normalizeValue(input) as {
+ level1: { level2: { level3: { level4: { z: string; a: string } } } }
+ }
expect(Object.keys(result.level1.level2.level3.level4)).toEqual(['a', 'z'])
})
@@ -143,7 +149,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "for" loop type', () => {
- const loop = {
+ const loop: Loop & { extraField?: string } = {
id: 'loop1',
nodes: ['block1', 'block2'],
loopType: 'for',
@@ -164,7 +170,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "forEach" loop type', () => {
- const loop = {
+ const loop: Loop = {
id: 'loop2',
nodes: ['block1'],
loopType: 'forEach',
@@ -183,10 +189,11 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "while" loop type', () => {
- const loop = {
+ const loop: Loop = {
id: 'loop3',
nodes: ['block1', 'block2', 'block3'],
loopType: 'while',
+ iterations: 0,
whileCondition: ' === true',
doWhileCondition: 'should-be-excluded',
}
@@ -201,10 +208,11 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "doWhile" loop type', () => {
- const loop = {
+ const loop: Loop = {
id: 'loop4',
nodes: ['block1'],
loopType: 'doWhile',
+ iterations: 0,
doWhileCondition: ' < 100',
whileCondition: 'should-be-excluded',
}
@@ -218,11 +226,11 @@ describe('Workflow Normalization Utilities', () => {
})
})
- it.concurrent('should handle unknown loop type with base fields only', () => {
- const loop = {
+ it.concurrent('should extract only relevant fields for for loop type', () => {
+ const loop: Loop = {
id: 'loop5',
nodes: ['block1'],
- loopType: 'unknown',
+ loopType: 'for',
iterations: 5,
forEachItems: 'items',
}
@@ -231,7 +239,8 @@ describe('Workflow Normalization Utilities', () => {
expect(result).toEqual({
id: 'loop5',
nodes: ['block1'],
- loopType: 'unknown',
+ loopType: 'for',
+ iterations: 5,
})
})
})
@@ -243,7 +252,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "count" parallel type', () => {
- const parallel = {
+ const parallel: Parallel & { extraField?: string } = {
id: 'parallel1',
nodes: ['block1', 'block2'],
parallelType: 'count',
@@ -262,7 +271,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "collection" parallel type', () => {
- const parallel = {
+ const parallel: Parallel = {
id: 'parallel2',
nodes: ['block1'],
parallelType: 'collection',
@@ -279,11 +288,11 @@ describe('Workflow Normalization Utilities', () => {
})
})
- it.concurrent('should handle unknown parallel type with base fields only', () => {
- const parallel = {
+ it.concurrent('should include base fields for undefined parallel type', () => {
+ const parallel: Parallel = {
id: 'parallel3',
nodes: ['block1'],
- parallelType: 'unknown',
+ parallelType: undefined,
count: 5,
distribution: 'items',
}
@@ -292,7 +301,7 @@ describe('Workflow Normalization Utilities', () => {
expect(result).toEqual({
id: 'parallel3',
nodes: ['block1'],
- parallelType: 'unknown',
+ parallelType: undefined,
})
})
})
@@ -312,7 +321,7 @@ describe('Workflow Normalization Utilities', () => {
const tools = [
{ id: 'tool1', name: 'Search', isExpanded: true },
{ id: 'tool2', name: 'Calculator', isExpanded: false },
- { id: 'tool3', name: 'Weather' }, // No isExpanded field
+ { id: 'tool3', name: 'Weather' },
]
const result = sanitizeTools(tools)
@@ -365,7 +374,7 @@ describe('Workflow Normalization Utilities', () => {
const inputFormat = [
{ id: 'input1', name: 'Name', value: 'John', collapsed: true },
{ id: 'input2', name: 'Age', value: 25, collapsed: false },
- { id: 'input3', name: 'Email' }, // No value or collapsed
+ { id: 'input3', name: 'Email' },
]
const result = sanitizeInputFormat(inputFormat)
diff --git a/apps/sim/lib/workflows/comparison/normalize.ts b/apps/sim/lib/workflows/comparison/normalize.ts
index bbc60c81ae..571f201138 100644
--- a/apps/sim/lib/workflows/comparison/normalize.ts
+++ b/apps/sim/lib/workflows/comparison/normalize.ts
@@ -3,12 +3,15 @@
* Used by both client-side signature computation and server-side comparison.
*/
+import type { Edge } from 'reactflow'
+import type { Loop, Parallel, Variable } from '@/stores/workflows/workflow/types'
+
/**
* Normalizes a value for consistent comparison by sorting object keys recursively
* @param value - The value to normalize
* @returns A normalized version of the value with sorted keys
*/
-export function normalizeValue(value: any): any {
+export function normalizeValue(value: unknown): unknown {
if (value === null || value === undefined || typeof value !== 'object') {
return value
}
@@ -17,9 +20,9 @@ export function normalizeValue(value: any): any {
return value.map(normalizeValue)
}
- const sorted: Record<string, any> = {}
- for (const key of Object.keys(value).sort()) {
- sorted[key] = normalizeValue(value[key])
+ const sorted: Record<string, unknown> = {}
+ for (const key of Object.keys(value as Record<string, unknown>).sort()) {
+ sorted[key] = normalizeValue((value as Record<string, unknown>)[key])
}
return sorted
}
@@ -29,19 +32,30 @@ export function normalizeValue(value: any): any {
* @param value - The value to normalize and stringify
* @returns A normalized JSON string
*/
-export function normalizedStringify(value: any): string {
+export function normalizedStringify(value: unknown): string {
return JSON.stringify(normalizeValue(value))
}
+/** Normalized loop result type with only essential fields */
+interface NormalizedLoop {
+ id: string
+ nodes: string[]
+ loopType: Loop['loopType']
+ iterations?: number
+ forEachItems?: Loop['forEachItems']
+ whileCondition?: string
+ doWhileCondition?: string
+}
+
/**
* Normalizes a loop configuration by extracting only the relevant fields for the loop type
* @param loop - The loop configuration object
* @returns Normalized loop with only relevant fields
*/
-export function normalizeLoop(loop: any): any {
+export function normalizeLoop(loop: Loop | null | undefined): NormalizedLoop | null | undefined {
if (!loop) return loop
const { id, nodes, loopType, iterations, forEachItems, whileCondition, doWhileCondition } = loop
- const base: any = { id, nodes, loopType }
+ const base: Pick<NormalizedLoop, 'id' | 'nodes' | 'loopType'> = { id, nodes, loopType }
switch (loopType) {
case 'for':
@@ -57,15 +71,30 @@ export function normalizeLoop(loop: any): any {
}
}
+/** Normalized parallel result type with only essential fields */
+interface NormalizedParallel {
+ id: string
+ nodes: string[]
+ parallelType: Parallel['parallelType']
+ count?: number
+ distribution?: Parallel['distribution']
+}
+
/**
* Normalizes a parallel configuration by extracting only the relevant fields for the parallel type
* @param parallel - The parallel configuration object
* @returns Normalized parallel with only relevant fields
*/
-export function normalizeParallel(parallel: any): any {
+export function normalizeParallel(
+ parallel: Parallel | null | undefined
+): NormalizedParallel | null | undefined {
if (!parallel) return parallel
const { id, nodes, parallelType, count, distribution } = parallel
- const base: any = { id, nodes, parallelType }
+ const base: Pick<NormalizedParallel, 'id' | 'nodes' | 'parallelType'> = {
+ id,
+ nodes,
+ parallelType,
+ }
switch (parallelType) {
case 'count':
@@ -77,23 +106,37 @@ export function normalizeParallel(parallel: any): any {
}
}
+/** Tool configuration with optional UI-only isExpanded field */
+type ToolWithExpanded = Record<string, unknown> & { isExpanded?: boolean }
+
/**
* Sanitizes tools array by removing UI-only fields like isExpanded
* @param tools - Array of tool configurations
* @returns Sanitized tools array
*/
-export function sanitizeTools(tools: any[] | undefined): any[] {
+export function sanitizeTools(tools: unknown[] | undefined): Record<string, unknown>[] {
if (!Array.isArray(tools)) return []
- return tools.map(({ isExpanded, ...rest }) => rest)
+ return tools.map((tool) => {
+ if (tool && typeof tool === 'object' && !Array.isArray(tool)) {
+ const { isExpanded, ...rest } = tool as ToolWithExpanded
+ return rest
+ }
+ return tool as Record<string, unknown>
+ })
}
+/** Variable with optional UI-only validationError field */
+type VariableWithValidation = Variable & { validationError?: string }
+
/**
* Sanitizes a variable by removing UI-only fields like validationError
* @param variable - The variable object
* @returns Sanitized variable object
*/
-export function sanitizeVariable(variable: any): any {
+export function sanitizeVariable(
+ variable: VariableWithValidation | null | undefined
+): Omit<VariableWithValidation, 'validationError'> | null | undefined {
if (!variable || typeof variable !== 'object') return variable
const { validationError, ...rest } = variable
return rest
@@ -105,21 +148,38 @@ export function sanitizeVariable(variable: any): any {
* @param variables - The variables to normalize
* @returns A normalized variables object
*/
-export function normalizeVariables(variables: any): Record<string, any> {
+export function normalizeVariables(variables: unknown): Record<string, unknown> {
if (!variables) return {}
if (Array.isArray(variables)) return {}
if (typeof variables !== 'object') return {}
- return variables
+ return variables as Record<string, unknown>
}
+/** Input format item with optional UI-only fields */
+type InputFormatItem = Record<string, unknown> & { value?: unknown; collapsed?: boolean }
+
/**
* Sanitizes inputFormat array by removing UI-only fields like value and collapsed
* @param inputFormat - Array of input format configurations
* @returns Sanitized input format array
*/
-export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] {
+export function sanitizeInputFormat(inputFormat: unknown[] | undefined): Record<string, unknown>[] {
if (!Array.isArray(inputFormat)) return []
- return inputFormat.map(({ value, collapsed, ...rest }) => rest)
+ return inputFormat.map((item) => {
+ if (item && typeof item === 'object' && !Array.isArray(item)) {
+ const { value, collapsed, ...rest } = item as InputFormatItem
+ return rest
+ }
+ return item as Record<string, unknown>
+ })
+}
+
+/** Normalized edge with only connection-relevant fields */
+interface NormalizedEdge {
+ source: string
+ sourceHandle?: string | null
+ target: string
+ targetHandle?: string | null
}
/**
@@ -127,12 +187,7 @@ export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] {
* @param edge - The edge object
* @returns Normalized edge with only connection fields
*/
-export function normalizeEdge(edge: any): {
- source: string
- sourceHandle?: string
- target: string
- targetHandle?: string
-} {
+export function normalizeEdge(edge: Edge): NormalizedEdge {
return {
source: edge.source,
sourceHandle: edge.sourceHandle,
@@ -147,8 +202,18 @@ export function normalizeEdge(edge: any): {
* @returns Sorted array of normalized edges
*/
export function sortEdges(
- edges: Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }>
-): Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }> {
+ edges: Array<{
+ source: string
+ sourceHandle?: string | null
+ target: string
+ targetHandle?: string | null
+ }>
+): Array<{
+ source: string
+ sourceHandle?: string | null
+ target: string
+ targetHandle?: string | null
+}> {
return [...edges].sort((a, b) =>
`${a.source}-${a.sourceHandle}-${a.target}-${a.targetHandle}`.localeCompare(
`${b.source}-${b.sourceHandle}-${b.target}-${b.targetHandle}`
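
Editor's note: the normalization helpers in this file exist so two workflow states compare equal regardless of key or edge ordering. A minimal self-contained sketch of the sort-then-stringify idea (mirroring `normalizeValue`/`normalizedStringify` above, not the full implementation):

```ts
// Sort object keys recursively, then stringify, so key order cannot
// affect equality comparisons.
function normalize(value: unknown): unknown {
  if (Array.isArray(value)) return value.map(normalize)
  if (value !== null && typeof value === 'object') {
    const sorted: Record<string, unknown> = {}
    for (const key of Object.keys(value as Record<string, unknown>).sort()) {
      sorted[key] = normalize((value as Record<string, unknown>)[key])
    }
    return sorted
  }
  return value
}

// Key order no longer affects equality:
JSON.stringify(normalize({ b: 1, a: 2 })) === JSON.stringify(normalize({ a: 2, b: 1 })) // true
```
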
diff --git a/apps/sim/lib/workflows/credentials/credential-extractor.ts b/apps/sim/lib/workflows/credentials/credential-extractor.ts
index 014febabc5..2fb757ba49 100644
--- a/apps/sim/lib/workflows/credentials/credential-extractor.ts
+++ b/apps/sim/lib/workflows/credentials/credential-extractor.ts
@@ -1,6 +1,15 @@
import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
+import type { BlockState, SubBlockState, WorkflowState } from '@/stores/workflows/workflow/types'
+
+/** Condition type for SubBlock visibility - mirrors the inline type from blocks/types.ts */
+interface SubBlockCondition {
+ field: string
+ value: string | number | boolean | Array<string> | undefined
+ not?: boolean
+ and?: SubBlockCondition
+}
// Credential types based on actual patterns in the codebase
export enum CredentialType {
@@ -48,7 +57,9 @@ const WORKSPACE_SPECIFIC_FIELDS = new Set([
* Extract required credentials from a workflow state
* This analyzes all blocks and their subblocks to identify credential requirements
*/
-export function extractRequiredCredentials(state: any): CredentialRequirement[] {
+export function extractRequiredCredentials(
+ state: Partial<WorkflowState> | null | undefined
+): CredentialRequirement[] {
const credentials: CredentialRequirement[] = []
const seen = new Set<string>()
@@ -57,7 +68,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
}
// Process each block
- Object.values(state.blocks).forEach((block: any) => {
+ Object.values(state.blocks).forEach((block: BlockState) => {
if (!block?.type) return
const blockConfig = getBlock(block.type)
@@ -104,8 +115,8 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
})
})
- // Helper to check visibility, respecting mode and conditions
- function isSubBlockVisible(block: any, subBlockConfig: SubBlockConfig): boolean {
+ /** Helper to check visibility, respecting mode and conditions */
+ function isSubBlockVisible(block: BlockState, subBlockConfig: SubBlockConfig): boolean {
const mode = subBlockConfig.mode ?? 'both'
if (mode === 'trigger' && !block?.triggerMode) return false
if (mode === 'basic' && block?.advancedMode) return false
@@ -118,7 +129,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
? subBlockConfig.condition()
: subBlockConfig.condition
- const evaluate = (cond: any): boolean => {
+ const evaluate = (cond: SubBlockCondition): boolean => {
const currentValue = block?.subBlocks?.[cond.field]?.value
const expected = cond.value
@@ -126,7 +137,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
expected === undefined
? true
: Array.isArray(expected)
- ? expected.includes(currentValue)
+ ? expected.includes(currentValue as string)
: currentValue === expected
if (cond.not) match = !match
@@ -161,6 +172,12 @@ function formatFieldName(fieldName: string): string {
.join(' ')
}
+/** Block state with mutable subBlocks for sanitization */
+interface MutableBlockState extends Omit<BlockState, 'subBlocks' | 'data'> {
+ subBlocks: Record<string, SubBlockState | undefined>
+ data?: Record<string, unknown>
+}
+
/**
* Remove malformed subBlocks from a block that may have been created by bugs.
* This includes subBlocks with:
@@ -168,12 +185,12 @@ function formatFieldName(fieldName: string): string {
* - Missing required `id` field
* - Type "unknown" (indicates malformed data)
*/
-function removeMalformedSubBlocks(block: any): void {
+function removeMalformedSubBlocks(block: MutableBlockState): void {
if (!block.subBlocks) return
const keysToRemove: string[] = []
- Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
+ Object.entries(block.subBlocks).forEach(([key, subBlock]) => {
// Flag subBlocks with invalid keys (literal "undefined" string)
if (key === 'undefined') {
keysToRemove.push(key)
@@ -187,7 +204,8 @@ function removeMalformedSubBlocks(block: any): void {
}
// Flag subBlocks with type "unknown" (malformed data)
- if (subBlock.type === 'unknown') {
+ // Cast to string for comparison since SubBlockType doesn't include 'unknown'
+ if ((subBlock.type as string) === 'unknown') {
keysToRemove.push(key)
return
}
@@ -204,6 +222,12 @@ function removeMalformedSubBlocks(block: any): void {
})
}
+/** Sanitized workflow state structure */
+interface SanitizedWorkflowState {
+ blocks?: Record<string, MutableBlockState>
+ [key: string]: unknown
+}
+
/**
* Sanitize workflow state by removing all credentials and workspace-specific data
* This is used for both template creation and workflow export to ensure consistency
@@ -212,18 +236,18 @@ function removeMalformedSubBlocks(block: any): void {
* @param options - Options for sanitization behavior
*/
export function sanitizeWorkflowForSharing(
- state: any,
+ state: Partial<WorkflowState> | null | undefined,
options: {
preserveEnvVars?: boolean // Keep {{VAR}} references for export
} = {}
-): any {
- const sanitized = JSON.parse(JSON.stringify(state)) // Deep clone
+): SanitizedWorkflowState {
+ const sanitized = JSON.parse(JSON.stringify(state)) as SanitizedWorkflowState // Deep clone
if (!sanitized?.blocks) {
return sanitized
}
- Object.values(sanitized.blocks).forEach((block: any) => {
+ Object.values(sanitized.blocks).forEach((block: MutableBlockState) => {
if (!block?.type) return
// First, remove any malformed subBlocks that may have been created by bugs
@@ -239,7 +263,7 @@ export function sanitizeWorkflowForSharing(
// Clear OAuth credentials (type: 'oauth-input')
if (subBlockConfig.type === 'oauth-input') {
- block.subBlocks[subBlockConfig.id].value = null
+ block.subBlocks[subBlockConfig.id]!.value = null
}
// Clear secret fields (password: true)
@@ -247,24 +271,24 @@ export function sanitizeWorkflowForSharing(
// Preserve environment variable references if requested
if (
options.preserveEnvVars &&
- typeof subBlock.value === 'string' &&
+ typeof subBlock?.value === 'string' &&
subBlock.value.startsWith('{{') &&
subBlock.value.endsWith('}}')
) {
// Keep the env var reference
} else {
- block.subBlocks[subBlockConfig.id].value = null
+ block.subBlocks[subBlockConfig.id]!.value = null
}
}
// Clear workspace-specific selectors
else if (WORKSPACE_SPECIFIC_TYPES.has(subBlockConfig.type)) {
- block.subBlocks[subBlockConfig.id].value = null
+ block.subBlocks[subBlockConfig.id]!.value = null
}
// Clear workspace-specific fields by ID
else if (WORKSPACE_SPECIFIC_FIELDS.has(subBlockConfig.id)) {
- block.subBlocks[subBlockConfig.id].value = null
+ block.subBlocks[subBlockConfig.id]!.value = null
}
}
})
@@ -272,9 +296,9 @@ export function sanitizeWorkflowForSharing(
// Process subBlocks without config (fallback)
if (block.subBlocks) {
- Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
+ Object.entries(block.subBlocks).forEach(([key, subBlock]) => {
// Clear workspace-specific fields by key name
- if (WORKSPACE_SPECIFIC_FIELDS.has(key)) {
+ if (WORKSPACE_SPECIFIC_FIELDS.has(key) && subBlock) {
subBlock.value = null
}
})
@@ -282,14 +306,14 @@ export function sanitizeWorkflowForSharing(
// Clear data field (for backward compatibility)
if (block.data) {
- Object.entries(block.data).forEach(([key, value]: [string, any]) => {
+ Object.entries(block.data).forEach(([key]) => {
// Clear anything that looks like credentials
if (/credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key)) {
- block.data[key] = null
+ block.data![key] = null
}
// Clear workspace-specific data
if (WORKSPACE_SPECIFIC_FIELDS.has(key)) {
- block.data[key] = null
+ block.data![key] = null
}
})
}
@@ -302,7 +326,9 @@ export function sanitizeWorkflowForSharing(
* Sanitize workflow state for templates (removes credentials and workspace data)
* Wrapper for backward compatibility
*/
-export function sanitizeCredentials(state: any): any {
+export function sanitizeCredentials(
+ state: Partial<WorkflowState> | null | undefined
+): SanitizedWorkflowState {
return sanitizeWorkflowForSharing(state, { preserveEnvVars: false })
}
@@ -310,6 +336,8 @@ export function sanitizeCredentials(state: any): any {
* Sanitize workflow state for export (preserves env vars)
* Convenience wrapper for workflow export
*/
-export function sanitizeForExport(state: any): any {
+export function sanitizeForExport(
+ state: Partial<WorkflowState> | null | undefined
+): SanitizedWorkflowState {
return sanitizeWorkflowForSharing(state, { preserveEnvVars: true })
}
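
Editor's note: the `SubBlockCondition` type introduced in this file describes a linked chain of conditions with optional negation. A self-contained sketch of the evaluation, with the `and`-chain semantics assumed to be conjunction (only fragments of the real evaluator are visible above):

```ts
// Condition evaluation sketch; `and` chaining assumed to be conjunction.
interface SubBlockCondition {
  field: string
  value: string | number | boolean | Array<string> | undefined
  not?: boolean
  and?: SubBlockCondition
}

function evaluate(cond: SubBlockCondition, values: Record<string, unknown>): boolean {
  const current = values[cond.field]
  let match =
    cond.value === undefined
      ? true // no expected value means the condition always passes
      : Array.isArray(cond.value)
        ? cond.value.includes(current as string)
        : current === cond.value
  if (cond.not) match = !match
  return cond.and ? match && evaluate(cond.and, values) : match
}

// evaluate({ field: 'provider', value: ['gmail', 'outlook'] }, { provider: 'gmail' }) → true
```
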
diff --git a/apps/sim/lib/workflows/diff/diff-engine.ts b/apps/sim/lib/workflows/diff/diff-engine.ts
index 3efb6831ae..f22365d145 100644
--- a/apps/sim/lib/workflows/diff/diff-engine.ts
+++ b/apps/sim/lib/workflows/diff/diff-engine.ts
@@ -245,10 +245,10 @@ function computeFieldDiff(
const unchangedFields: string[] = []
// Check basic fields
- const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles']
+ const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles'] as const
for (const field of fieldsToCheck) {
- const currentValue = (currentBlock as any)[field]
- const proposedValue = (proposedBlock as any)[field]
+ const currentValue = currentBlock[field]
+ const proposedValue = proposedBlock[field]
if (JSON.stringify(currentValue) !== JSON.stringify(proposedValue)) {
changedFields.push(field)
} else if (currentValue !== undefined) {
@@ -363,7 +363,7 @@ export class WorkflowDiffEngine {
}
// Call the API route to create the diff
- const body: any = {
+ const body: Record<string, unknown> = {
jsonContent,
currentWorkflowState: mergedBaseline,
}
@@ -859,7 +859,7 @@ export class WorkflowDiffEngine {
const proposedEdgeSet = new Set<string>()
// Create edge identifiers for current state (using sim-agent format)
- mergedBaseline.edges.forEach((edge: any) => {
+ mergedBaseline.edges.forEach((edge: Edge) => {
const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
currentEdgeSet.add(edgeId)
})
@@ -992,7 +992,7 @@ export class WorkflowDiffEngine {
}
// Call the API route to merge the diff
- const body: any = {
+ const body: Record<string, unknown> = {
existingDiff: this.currentDiff,
jsonContent,
}
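
Editor's note: the `fieldsToCheck ... as const` change above is the standard way to drop `(x as any)[field]` casts — a readonly tuple of literal keys makes the indexed access well-typed. A minimal sketch with a simplified block shape (the real `BlockState` has more fields):

```ts
// `field` is narrowed to a union of literal keys, so `block[field]` typechecks.
interface BlockLike {
  type: string
  name: string
  enabled: boolean
  triggerMode?: boolean
  horizontalHandles?: boolean
}

const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles'] as const

function changedFields(a: BlockLike, b: BlockLike): string[] {
  const changed: string[] = []
  for (const field of fieldsToCheck) {
    // a[field] and b[field] are precisely typed; no `any` cast needed
    if (JSON.stringify(a[field]) !== JSON.stringify(b[field])) changed.push(field)
  }
  return changed
}
```
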
diff --git a/apps/sim/lib/workflows/executor/execute-workflow.ts b/apps/sim/lib/workflows/executor/execute-workflow.ts
index b16e6ea820..ce6f4c2c0d 100644
--- a/apps/sim/lib/workflows/executor/execute-workflow.ts
+++ b/apps/sim/lib/workflows/executor/execute-workflow.ts
@@ -5,6 +5,7 @@ import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata } from '@/executor/execution/types'
+import type { ExecutionResult, StreamingExecution } from '@/executor/types'
const logger = createLogger('WorkflowExecution')
@@ -13,8 +14,8 @@ export interface ExecuteWorkflowOptions {
selectedOutputs?: string[]
isSecureMode?: boolean
workflowTriggerType?: 'api' | 'chat'
- onStream?: (streamingExec: any) => Promise<void>
- onBlockComplete?: (blockId: string, output: any) => Promise<void>
+ onStream?: (streamingExec: StreamingExecution) => Promise<void>
+ onBlockComplete?: (blockId: string, output: unknown) => Promise<void>
skipLoggingComplete?: boolean
}
@@ -29,11 +30,11 @@ export interface WorkflowInfo {
export async function executeWorkflow(
workflow: WorkflowInfo,
requestId: string,
- input: any | undefined,
+ input: unknown | undefined,
actorUserId: string,
streamConfig?: ExecuteWorkflowOptions,
providedExecutionId?: string
-): Promise<any> {
+): Promise<ExecutionResult | StreamingExecution> {
if (!workflow.workspaceId) {
throw new Error(`Workflow ${workflow.id} has no workspaceId`)
}
@@ -71,7 +72,7 @@ export async function executeWorkflow(
callbacks: {
onStream: streamConfig?.onStream,
onBlockComplete: streamConfig?.onBlockComplete
- ? async (blockId: string, _blockName: string, _blockType: string, output: any) => {
+ ? async (blockId: string, _blockName: string, _blockType: string, output: unknown) => {
await streamConfig.onBlockComplete!(blockId, output)
}
: undefined,
@@ -119,7 +120,7 @@ export async function executeWorkflow(
}
return result
- } catch (error: any) {
+ } catch (error: unknown) {
logger.error(`[${requestId}] Workflow execution failed:`, error)
throw error
}
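
Editor's note: the `catch (error: unknown)` change above follows the usual narrowing idiom — members are only read after a type check. In miniature:

```ts
// Narrow an unknown catch value before touching its members.
try {
  JSON.parse('not json')
} catch (error: unknown) {
  const message = error instanceof Error ? error.message : String(error)
  console.error(message)
}
```
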
diff --git a/apps/sim/lib/workflows/executor/execution-core.ts b/apps/sim/lib/workflows/executor/execution-core.ts
index 9e81d8711a..0eeb946542 100644
--- a/apps/sim/lib/workflows/executor/execution-core.ts
+++ b/apps/sim/lib/workflows/executor/execution-core.ts
@@ -19,8 +19,12 @@ import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { Executor } from '@/executor'
import { REFERENCE } from '@/executor/constants'
import type { ExecutionSnapshot } from '@/executor/execution/snapshot'
-import type { ExecutionCallbacks, IterationContext } from '@/executor/execution/types'
-import type { ExecutionResult } from '@/executor/types'
+import type {
+ ContextExtensions,
+ ExecutionCallbacks,
+ IterationContext,
+} from '@/executor/execution/types'
+import type { ExecutionResult, NormalizedBlockOutput } from '@/executor/types'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
@@ -41,7 +45,7 @@ export interface ExecuteWorkflowCoreOptions {
abortSignal?: AbortSignal
}
-function parseVariableValueByType(value: any, type: string): any {
+function parseVariableValueByType(value: unknown, type: string): unknown {
if (value === null || value === undefined) {
switch (type) {
case 'number':
@@ -262,7 +266,7 @@ export async function executeWorkflowCore(
const filteredEdges = edges
// Check if this is a resume execution before trigger resolution
- const resumeFromSnapshot = (metadata as any).resumeFromSnapshot === true
+ const resumeFromSnapshot = metadata.resumeFromSnapshot === true
const resumePendingQueue = snapshot.state?.pendingQueue
let resolvedTriggerBlockId = triggerBlockId
@@ -321,7 +325,7 @@ export async function executeWorkflowCore(
blockId: string,
blockName: string,
blockType: string,
- output: any,
+ output: { input?: unknown; output: NormalizedBlockOutput; executionTime: number },
iterationContext?: IterationContext
) => {
await loggingSession.onBlockComplete(blockId, blockName, blockType, output)
@@ -330,7 +334,7 @@ export async function executeWorkflowCore(
}
}
- const contextExtensions: any = {
+ const contextExtensions: ContextExtensions = {
stream: !!onStream,
selectedOutputs,
executionId,
@@ -342,7 +346,12 @@ export async function executeWorkflowCore(
onStream,
resumeFromSnapshot,
resumePendingQueue,
- remainingEdges: snapshot.state?.remainingEdges,
+ remainingEdges: snapshot.state?.remainingEdges?.map((edge) => ({
+ source: edge.source,
+ target: edge.target,
+ sourceHandle: edge.sourceHandle ?? undefined,
+ targetHandle: edge.targetHandle ?? undefined,
+ })),
dagIncomingEdges: snapshot.state?.dagIncomingEdges,
snapshotState: snapshot.state,
metadata,
@@ -363,7 +372,7 @@ export async function executeWorkflowCore(
// Convert initial workflow variables to their native types
if (workflowVariables) {
for (const [varId, variable] of Object.entries(workflowVariables)) {
- const v = variable as any
+ const v = variable as { value?: unknown; type?: string }
if (v.value !== undefined && v.type) {
v.value = parseVariableValueByType(v.value, v.type)
}
@@ -432,18 +441,23 @@ export async function executeWorkflowCore(
})
return result
- } catch (error: any) {
+ } catch (error: unknown) {
logger.error(`[${requestId}] Execution failed:`, error)
- const executionResult = (error as any)?.executionResult
+ const errorWithResult = error as {
+ executionResult?: ExecutionResult
+ message?: string
+ stack?: string
+ }
+ const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: executionResult?.metadata?.duration || 0,
error: {
- message: error.message || 'Execution failed',
- stackTrace: error.stack,
+ message: errorWithResult?.message || 'Execution failed',
+ stackTrace: errorWithResult?.stack,
},
traceSpans,
})
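
Editor's note: the `remainingEdges` mapping above reconciles reactflow's `string | null` handles with a context type that expects `string | undefined`. The same conversion in isolation:

```ts
// null → undefined handle normalization, as applied to remainingEdges above.
import type { Edge } from 'reactflow'

function toContextEdges(edges: Edge[] | undefined) {
  return edges?.map((edge) => ({
    source: edge.source,
    target: edge.target,
    sourceHandle: edge.sourceHandle ?? undefined,
    targetHandle: edge.targetHandle ?? undefined,
  }))
}
```
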
diff --git a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts
index 5c10de594b..f695e8dc69 100644
--- a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts
+++ b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts
@@ -2,13 +2,14 @@ import { randomUUID } from 'crypto'
import { db } from '@sim/db'
import { pausedExecutions, resumeQueue, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, asc, desc, eq, inArray, lt, sql } from 'drizzle-orm'
+import { and, asc, desc, eq, inArray, lt, type SQL, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult, PausePoint, SerializedSnapshot } from '@/executor/types'
+import type { SerializedConnection } from '@/serializer/types'
const logger = createLogger('HumanInTheLoopManager')
@@ -18,7 +19,7 @@ interface ResumeQueueEntrySummary {
parentExecutionId: string
newExecutionId: string
contextId: string
- resumeInput: any
+ resumeInput: unknown
status: string
queuedAt: string | null
claimedAt: string | null
@@ -69,7 +70,7 @@ interface PersistPauseResultArgs {
interface EnqueueResumeArgs {
executionId: string
contextId: string
- resumeInput: any
+ resumeInput: unknown
userId: string
}
@@ -85,7 +86,7 @@ type EnqueueResumeResult =
resumeEntryId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
- resumeInput: any
+ resumeInput: unknown
userId: string
}
@@ -94,7 +95,7 @@ interface StartResumeExecutionArgs {
resumeExecutionId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
- resumeInput: any
+ resumeInput: unknown
userId: string
}
@@ -365,7 +366,7 @@ export class PauseResumeManager {
resumeExecutionId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
- resumeInput: any
+ resumeInput: unknown
userId: string
}): Promise {
const { resumeExecutionId, pausedExecution, contextId, resumeInput, userId } = args
@@ -408,9 +409,8 @@ export class PauseResumeManager {
const rawPauseBlockId = pausePoint.blockId ?? contextId
const pauseBlockId = PauseResumeManager.normalizePauseBlockId(rawPauseBlockId)
- const dagIncomingEdgesFromSnapshot: Record<string, Edge[]> | undefined = (
- baseSnapshot.state as any
- )?.dagIncomingEdges
+ const dagIncomingEdgesFromSnapshot: Record<string, Edge[]> | undefined =
+ baseSnapshot.state?.dagIncomingEdges
const downstreamBlocks = dagIncomingEdgesFromSnapshot
? Object.entries(dagIncomingEdgesFromSnapshot)
@@ -424,9 +424,10 @@ export class PauseResumeManager {
.map(([nodeId]) => nodeId)
: baseSnapshot.workflow.connections
.filter(
- (conn: any) => PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId
+ (conn: SerializedConnection) =>
+ PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId
)
- .map((conn: any) => conn.target)
+ .map((conn: SerializedConnection) => conn.target)
logger.info('Found downstream blocks', {
pauseBlockId,
@@ -448,7 +449,7 @@ export class PauseResumeManager {
if (stateCopy) {
const dagIncomingEdges: Record<string, Edge[]> | undefined =
- (stateCopy as any)?.dagIncomingEdges || dagIncomingEdgesFromSnapshot
+ stateCopy.dagIncomingEdges || dagIncomingEdgesFromSnapshot
// Calculate the pause duration (time from pause to resume)
const pauseDurationMs = pausedExecution.pausedAt
@@ -617,11 +618,11 @@ export class PauseResumeManager {
// If we didn't find any edges via the DAG snapshot, fall back to workflow connections
if (edgesToRemove.length === 0 && baseSnapshot.workflow.connections?.length) {
edgesToRemove = baseSnapshot.workflow.connections
- .filter((conn: any) =>
+ .filter((conn: SerializedConnection) =>
completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source))
)
- .map((conn: any) => ({
- id: conn.id ?? `${conn.source}→${conn.target}`,
+ .map((conn: SerializedConnection) => ({
+ id: `${conn.source}→${conn.target}`,
source: conn.source,
target: conn.target,
sourceHandle: conn.sourceHandle,
@@ -630,11 +631,11 @@ export class PauseResumeManager {
}
} else {
edgesToRemove = baseSnapshot.workflow.connections
- .filter((conn: any) =>
+ .filter((conn: SerializedConnection) =>
completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source))
)
- .map((conn: any) => ({
- id: conn.id ?? `${conn.source}→${conn.target}`,
+ .map((conn: SerializedConnection) => ({
+ id: `${conn.source}→${conn.target}`,
source: conn.source,
target: conn.target,
sourceHandle: conn.sourceHandle,
@@ -913,7 +914,7 @@ export class PauseResumeManager {
}): Promise {
const { workflowId, status } = options
- let whereClause: any = eq(pausedExecutions.workflowId, workflowId)
+ let whereClause: SQL | undefined = eq(pausedExecutions.workflowId, workflowId)
if (status) {
const statuses = Array.isArray(status)
@@ -924,7 +925,7 @@ export class PauseResumeManager {
if (statuses.length === 1) {
whereClause = and(whereClause, eq(pausedExecutions.status, statuses[0]))
} else if (statuses.length > 1) {
- whereClause = and(whereClause, inArray(pausedExecutions.status, statuses as any))
+ whereClause = and(whereClause, inArray(pausedExecutions.status, statuses))
}
}
@@ -1129,16 +1130,16 @@ export class PauseResumeManager {
}
private static mapPausePoints(
- pausePoints: any,
+ pausePoints: unknown,
queuePositions?: Map<string, number>,
latestEntries?: Map<string, ResumeQueueEntrySummary>
): PausePointWithQueue[] {
- const record = pausePoints as Record<string, any>
+ const record = pausePoints as Record<string, PausePoint> | null
if (!record) {
return []
}
- return Object.values(record).map((point: any) => {
+ return Object.values(record).map((point: PausePoint) => {
const queuePosition = queuePositions?.get(point.contextId ?? '') ?? null
const latestEntry = latestEntries?.get(point.contextId ?? '')
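
Editor's note: the `SQL | undefined` typing above matches drizzle-orm's `and(...)`, which returns `SQL | undefined`. A condensed sketch of the same where-clause accumulation (table and columns as referenced in the diff above):

```ts
// Typed where-clause composition with drizzle-orm.
import { and, eq, inArray, type SQL } from 'drizzle-orm'
import { pausedExecutions } from '@sim/db/schema'

function buildWhere(workflowId: string, statuses: string[]): SQL | undefined {
  let where: SQL | undefined = eq(pausedExecutions.workflowId, workflowId)
  if (statuses.length === 1) {
    where = and(where, eq(pausedExecutions.status, statuses[0]))
  } else if (statuses.length > 1) {
    where = and(where, inArray(pausedExecutions.status, statuses))
  }
  return where
}
```
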
diff --git a/apps/sim/lib/workflows/operations/import-export.ts b/apps/sim/lib/workflows/operations/import-export.ts
index f5dbd52a92..b446ea1083 100644
--- a/apps/sim/lib/workflows/operations/import-export.ts
+++ b/apps/sim/lib/workflows/operations/import-export.ts
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import JSZip from 'jszip'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
-import type { WorkflowState } from '@/stores/workflows/workflow/types'
+import type { Variable, WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowImportExport')
@@ -14,12 +14,7 @@ export interface WorkflowExportData {
folderId?: string | null
}
state: WorkflowState
- variables?: Array<{
- id: string
- name: string
- type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
- value: any
- }>
+ variables?: Record<string, Variable>
}
export interface FolderExportData {
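
Editor's note: `variables` moves from an array to a record keyed by variable id. The new shape, sketched with placeholder values (field names taken from the surrounding diff):

```ts
// Record-based variables shape used by the export format above.
const variables: Record<string, { id: string; name: string; type: string; value: unknown }> = {
  'var-1': { id: 'var-1', name: 'apiBase', type: 'string', value: 'https://example.com' },
  'var-2': { id: 'var-2', name: 'retries', type: 'number', value: 3 },
}

// Converting a legacy array export to the new record format:
const legacy = Object.values(variables)
const record = Object.fromEntries(legacy.map((v) => [v.id, v]))
```
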
diff --git a/apps/sim/lib/workflows/persistence/utils.ts b/apps/sim/lib/workflows/persistence/utils.ts
index b115321202..d6ccaa90f9 100644
--- a/apps/sim/lib/workflows/persistence/utils.ts
+++ b/apps/sim/lib/workflows/persistence/utils.ts
@@ -9,7 +9,7 @@ import {
workflowSubflows,
} from '@sim/db'
import { createLogger } from '@sim/logger'
-import type { InferSelectModel } from 'drizzle-orm'
+import type { InferInsertModel, InferSelectModel } from 'drizzle-orm'
import { and, desc, eq, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
@@ -22,6 +22,8 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowDBHelpers')
export type WorkflowDeploymentVersion = InferSelectModel<typeof workflowDeploymentVersion>
+type WebhookRecord = InferSelectModel<typeof webhook>
+type SubflowInsert = InferInsertModel<typeof workflowSubflows>
export interface WorkflowDeploymentVersionResponse {
id: string
@@ -43,7 +45,7 @@ export interface NormalizedWorkflowData {
export interface DeployedWorkflowData extends NormalizedWorkflowData {
deploymentVersionId: string
- variables?: Record<string, any>
+ variables?: Record<string, Variable>
}
export async function blockExistsInDeployment(
@@ -96,7 +98,7 @@ export async function loadDeployedWorkflowState(workflowId: string): Promise<DeployedWorkflowData | null> {
- const state = active.state as WorkflowState & { variables?: Record<string, any> }
+ const state = active.state as WorkflowState & { variables?: Record<string, Variable> }
return {
blocks: state.blocks || {},
@@ -336,7 +338,7 @@ export async function saveWorkflowToNormalizedTables(
// Start a transaction
await db.transaction(async (tx) => {
// Snapshot existing webhooks before deletion to preserve them through the cycle
- let existingWebhooks: any[] = []
+ let existingWebhooks: WebhookRecord[] = []
try {
existingWebhooks = await tx.select().from(webhook).where(eq(webhook.workflowId, workflowId))
} catch (webhookError) {
@@ -392,7 +394,7 @@ export async function saveWorkflowToNormalizedTables(
}
// Insert subflows (loops and parallels)
- const subflowInserts: any[] = []
+ const subflowInserts: SubflowInsert[] = []
// Add loops
Object.values(canonicalLoops).forEach((loop) => {
@@ -571,7 +573,7 @@ export async function deployWorkflow(params: {
const blockTypeCounts: Record = {}
for (const block of Object.values(currentState.blocks)) {
- const blockType = (block as any).type || 'unknown'
+ const blockType = block.type || 'unknown'
blockTypeCounts[blockType] = (blockTypeCounts[blockType] || 0) + 1
}
@@ -605,11 +607,33 @@ export async function deployWorkflow(params: {
}
}
+/** Input state for ID regeneration - partial to handle external sources */
+export interface RegenerateStateInput {
+ blocks?: Record<string, BlockState>
+ edges?: Edge[]
+ loops?: Record<string, Loop>
+ parallels?: Record<string, Parallel>
+ lastSaved?: number
+ variables?: Record<string, Variable>
+ metadata?: Record<string, unknown>
+}
+
+/** Output state after ID regeneration */
+interface RegenerateStateOutput {
+ blocks: Record<string, BlockState>
+ edges: Edge[]
+ loops: Record<string, Loop>
+ parallels: Record<string, Parallel>
+ lastSaved: number
+ variables?: Record<string, Variable>
+ metadata?: Record<string, unknown>
+}
+
/**
* Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
* Returns a new state with all IDs regenerated and references updated
*/
-export function regenerateWorkflowStateIds(state: any): any {
+export function regenerateWorkflowStateIds(state: RegenerateStateInput): RegenerateStateOutput {
// Create ID mappings
const blockIdMapping = new Map<string, string>()
const edgeIdMapping = new Map<string, string>()
@@ -624,7 +648,7 @@ export function regenerateWorkflowStateIds(state: any): any {
// Map edge IDs
- ;(state.edges || []).forEach((edge: any) => {
+ ;(state.edges || []).forEach((edge: Edge) => {
edgeIdMapping.set(edge.id, crypto.randomUUID())
})
@@ -639,28 +663,28 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Second pass: Create new state with regenerated IDs and updated references
- const newBlocks: Record<string, any> = {}
- const newEdges: any[] = []
- const newLoops: Record<string, any> = {}
- const newParallels: Record<string, any> = {}
+ const newBlocks: Record<string, BlockState> = {}
+ const newEdges: Edge[] = []
+ const newLoops: Record<string, Loop> = {}
+ const newParallels: Record<string, Parallel> = {}
// Regenerate blocks with updated references
- Object.entries(state.blocks || {}).forEach(([oldId, block]: [string, any]) => {
+ Object.entries(state.blocks || {}).forEach(([oldId, block]) => {
const newId = blockIdMapping.get(oldId)!
- const newBlock = { ...block, id: newId }
+ const newBlock: BlockState = { ...block, id: newId }
// Update parentId reference if it exists
if (newBlock.data?.parentId) {
const newParentId = blockIdMapping.get(newBlock.data.parentId)
if (newParentId) {
- newBlock.data.parentId = newParentId
+ newBlock.data = { ...newBlock.data, parentId: newParentId }
}
}
// Update any block references in subBlocks
if (newBlock.subBlocks) {
- const updatedSubBlocks: Record<string, any> = {}
- Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]: [string, any]) => {
+ const updatedSubBlocks: Record<string, SubBlockState> = {}
+ Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]) => {
const updatedSubBlock = { ...subBlock }
// If subblock value contains block references, update them
@@ -668,7 +692,7 @@ export function regenerateWorkflowStateIds(state: any): any {
typeof updatedSubBlock.value === 'string' &&
blockIdMapping.has(updatedSubBlock.value)
) {
- updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value)
+ updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value) ?? updatedSubBlock.value
}
updatedSubBlocks[subId] = updatedSubBlock
@@ -681,7 +705,7 @@ export function regenerateWorkflowStateIds(state: any): any {
// Regenerate edges with updated source/target references
- ;(state.edges || []).forEach((edge: any) => {
+ ;(state.edges || []).forEach((edge: Edge) => {
const newId = edgeIdMapping.get(edge.id)!
const newSource = blockIdMapping.get(edge.source) || edge.source
const newTarget = blockIdMapping.get(edge.target) || edge.target
@@ -695,9 +719,9 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Regenerate loops with updated node references
- Object.entries(state.loops || {}).forEach(([oldId, loop]: [string, any]) => {
+ Object.entries(state.loops || {}).forEach(([oldId, loop]) => {
const newId = loopIdMapping.get(oldId)!
- const newLoop = { ...loop, id: newId }
+ const newLoop: Loop = { ...loop, id: newId }
// Update nodes array with new block IDs
if (newLoop.nodes) {
@@ -708,9 +732,9 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Regenerate parallels with updated node references
- Object.entries(state.parallels || {}).forEach(([oldId, parallel]: [string, any]) => {
+ Object.entries(state.parallels || {}).forEach(([oldId, parallel]) => {
const newId = parallelIdMapping.get(oldId)!
- const newParallel = { ...parallel, id: newId }
+ const newParallel: Parallel = { ...parallel, id: newId }
// Update nodes array with new block IDs
if (newParallel.nodes) {
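
Editor's note: `regenerateWorkflowStateIds` builds old→new ID maps in a first pass, then rewrites every reference in a second pass, so edges and parent links resolve even when they point at blocks processed later. The core of that pattern in isolation:

```ts
// Two-pass ID remapping, as in regenerateWorkflowStateIds above.
const blockIds = ['blockA', 'blockB']
const blockIdMapping = new Map<string, string>()

// Pass 1: allocate every new ID up front.
for (const oldId of blockIds) blockIdMapping.set(oldId, crypto.randomUUID())

// Pass 2: rewrite references via the completed map, falling back to the
// original ID if a reference points outside the regenerated state.
const edge = { id: 'e1', source: 'blockA', target: 'blockB' }
const newEdge = {
  ...edge,
  id: crypto.randomUUID(),
  source: blockIdMapping.get(edge.source) ?? edge.source,
  target: blockIdMapping.get(edge.target) ?? edge.target,
}
```
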
diff --git a/apps/sim/lib/workflows/sanitization/json-sanitizer.ts b/apps/sim/lib/workflows/sanitization/json-sanitizer.ts
index eb062599f0..8ee5b01957 100644
--- a/apps/sim/lib/workflows/sanitization/json-sanitizer.ts
+++ b/apps/sim/lib/workflows/sanitization/json-sanitizer.ts
@@ -59,26 +59,36 @@ export interface ExportWorkflowState {
id: string
name: string
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
- value: any
+ value: unknown
}>
}
}
+/** Condition structure for sanitization */
+interface SanitizedCondition {
+ id: string
+ title: string
+ value: string
+}
+
/**
* Sanitize condition blocks by removing UI-specific metadata
* Returns cleaned JSON string (not parsed array)
*/
function sanitizeConditions(conditionsJson: string): string {
try {
- const conditions = JSON.parse(conditionsJson)
+ const conditions: unknown = JSON.parse(conditionsJson)
if (!Array.isArray(conditions)) return conditionsJson
// Keep only id, title, and value - remove UI state
- const cleaned = conditions.map((cond: any) => ({
- id: cond.id,
- title: cond.title,
- value: cond.value || '',
- }))
+ const cleaned: SanitizedCondition[] = conditions.map((cond: unknown) => {
+ const condition = cond as Record<string, unknown>
+ return {
+ id: String(condition.id ?? ''),
+ title: String(condition.title ?? ''),
+ value: String(condition.value ?? ''),
+ }
+ })
return JSON.stringify(cleaned)
} catch {
@@ -86,11 +96,50 @@ function sanitizeConditions(conditionsJson: string): string {
}
}
+/** Tool input structure for sanitization */
+interface ToolInput {
+ type: string
+ customToolId?: string
+ schema?: {
+ type?: string
+ function?: {
+ name: string
+ description?: string
+ parameters?: unknown
+ }
+ }
+ code?: string
+ title?: string
+ toolId?: string
+ usageControl?: string
+ isExpanded?: boolean
+ [key: string]: unknown
+}
+
+/** Sanitized tool output structure */
+interface SanitizedTool {
+ type: string
+ customToolId?: string
+ usageControl?: string
+ title?: string
+ toolId?: string
+ schema?: {
+ type: string
+ function: {
+ name: string
+ description?: string
+ parameters?: unknown
+ }
+ }
+ code?: string
+ [key: string]: unknown
+}
+
/**
* Sanitize tools array by removing UI state and redundant fields
*/
-function sanitizeTools(tools: any[]): any[] {
- return tools.map((tool) => {
+function sanitizeTools(tools: ToolInput[]): SanitizedTool[] {
+ return tools.map((tool): SanitizedTool => {
if (tool.type === 'custom-tool') {
// New reference format: minimal fields only
if (tool.customToolId && !tool.schema && !tool.code) {
@@ -102,7 +151,7 @@ function sanitizeTools(tools: any[]): any[] {
}
// Legacy inline format: include all fields
- const sanitized: any = {
+ const sanitized: SanitizedTool = {
type: tool.type,
title: tool.title,
toolId: tool.toolId,
@@ -129,23 +178,24 @@ function sanitizeTools(tools: any[]): any[] {
return sanitized
}
- const { isExpanded, ...cleanTool } = tool
- return cleanTool
+ const { isExpanded: _isExpanded, ...cleanTool } = tool
+ return cleanTool as SanitizedTool
})
}
/**
* Sort object keys recursively for consistent comparison
*/
-function sortKeysRecursively(item: any): any {
+function sortKeysRecursively(item: unknown): unknown {
if (Array.isArray(item)) {
return item.map(sortKeysRecursively)
}
if (item !== null && typeof item === 'object') {
- return Object.keys(item)
+ const obj = item as Record<string, unknown>
+ return Object.keys(obj)
.sort()
- .reduce((result: any, key: string) => {
- result[key] = sortKeysRecursively(item[key])
+ .reduce((result: Record<string, unknown>, key: string) => {
+ result[key] = sortKeysRecursively(obj[key])
return result
}, {})
}
@@ -183,7 +233,7 @@ function sanitizeSubBlocks(
// Sort keys for consistent comparison
if (obj && typeof obj === 'object') {
- sanitized[key] = sortKeysRecursively(obj)
+ sanitized[key] = sortKeysRecursively(obj) as Record<string, unknown>
return
}
} catch {
@@ -201,7 +251,7 @@ function sanitizeSubBlocks(
}
if (key === 'tools' && Array.isArray(subBlock.value)) {
- sanitized[key] = sanitizeTools(subBlock.value)
+ sanitized[key] = sanitizeTools(subBlock.value as unknown as ToolInput[])
return
}
@@ -383,7 +433,7 @@ export function sanitizeForExport(state: WorkflowState): ExportWorkflowState {
// Use unified sanitization with env var preservation for export
const sanitizedState = sanitizeWorkflowForSharing(fullState, {
preserveEnvVars: true, // Keep {{ENV_VAR}} references in exported workflows
- })
+ }) as ExportWorkflowState['state']
return {
version: '1.0',
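
Editor's note: `sanitizeConditions` now coerces each field through `String(x ?? '')`, so missing fields become empty strings rather than the literal string `"undefined"`. Its effect on a malformed entry:

```ts
// Behavior of the String(x ?? '') coercion used in sanitizeConditions above.
const conditions = [{ id: 'c1', title: undefined, value: 42, isExpanded: true }]
const cleaned = conditions.map((cond) => ({
  id: String(cond.id ?? ''), // 'c1'
  title: String(cond.title ?? ''), // '' rather than 'undefined'
  value: String(cond.value ?? ''), // '42'
}))
// UI-only fields like isExpanded are dropped entirely.
```
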
diff --git a/apps/sim/lib/workflows/sanitization/validation.ts b/apps/sim/lib/workflows/sanitization/validation.ts
index 75e9ef5639..4c25d19981 100644
--- a/apps/sim/lib/workflows/sanitization/validation.ts
+++ b/apps/sim/lib/workflows/sanitization/validation.ts
@@ -1,20 +1,40 @@
import { createLogger } from '@sim/logger'
import { getBlock } from '@/blocks/registry'
import { isCustomTool, isMcpTool } from '@/executor/constants'
-import type { WorkflowState } from '@/stores/workflows/workflow/types'
+import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import { getTool } from '@/tools/utils'
const logger = createLogger('WorkflowValidation')
+/** Tool structure for validation */
+interface AgentTool {
+ type: string
+ customToolId?: string
+ schema?: {
+ type?: string
+ function?: {
+ name?: string
+ parameters?: {
+ type?: string
+ properties?: Record<string, unknown>
+ }
+ }
+ }
+ code?: string
+ usageControl?: string
+ [key: string]: unknown
+}
+
/**
* Checks if a custom tool has a valid inline schema
*/
-function isValidCustomToolSchema(tool: any): boolean {
+function isValidCustomToolSchema(tool: unknown): boolean {
try {
if (!tool || typeof tool !== 'object') return false
- if (tool.type !== 'custom-tool') return true // non-custom tools are validated elsewhere
+ const t = tool as AgentTool
+ if (t.type !== 'custom-tool') return true // non-custom tools are validated elsewhere
- const schema = tool.schema
+ const schema = t.schema
if (!schema || typeof schema !== 'object') return false
const fn = schema.function
if (!fn || typeof fn !== 'object') return false
@@ -34,14 +54,15 @@ function isValidCustomToolSchema(tool: any): boolean {
/**
* Checks if a custom tool is a valid reference-only format (new format)
*/
-function isValidCustomToolReference(tool: any): boolean {
+function isValidCustomToolReference(tool: unknown): boolean {
try {
if (!tool || typeof tool !== 'object') return false
- if (tool.type !== 'custom-tool') return false
+ const t = tool as AgentTool
+ if (t.type !== 'custom-tool') return false
// Reference format: has customToolId but no inline schema/code
// This is valid - the tool will be loaded dynamically during execution
- if (tool.customToolId && typeof tool.customToolId === 'string') {
+ if (t.customToolId && typeof t.customToolId === 'string') {
return true
}
@@ -51,14 +72,14 @@ function isValidCustomToolReference(tool: any): boolean {
}
}
-export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
- blocks: Record<string, any>
+export function sanitizeAgentToolsInBlocks(blocks: Record<string, BlockState>): {
+ blocks: Record<string, BlockState>
warnings: string[]
} {
const warnings: string[] = []
// Shallow clone to avoid mutating callers
- const sanitizedBlocks: Record<string, any> = { ...blocks }
+ const sanitizedBlocks: Record<string, BlockState> = { ...blocks }
for (const [blockId, block] of Object.entries(sanitizedBlocks)) {
try {
@@ -90,10 +111,11 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, BlockState>): {
const originalLength = value.length
const cleaned = value
- .filter((tool: any) => {
+ .filter((tool: unknown) => {
// Allow non-custom tools to pass through as-is
if (!tool || typeof tool !== 'object') return false
- if (tool.type !== 'custom-tool') return true
+ const t = tool as AgentTool
+ if (t.type !== 'custom-tool') return true
// Check if it's a valid reference-only format (new format)
if (isValidCustomToolReference(tool)) {
@@ -106,21 +128,22 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, BlockState>): {
logger.warn('Removing invalid custom tool from workflow', {
blockId,
blockName: block.name,
- hasCustomToolId: !!tool.customToolId,
- hasSchema: !!tool.schema,
+ hasCustomToolId: !!t.customToolId,
+ hasSchema: !!t.schema,
})
}
return ok
})
- .map((tool: any) => {
- if (tool.type === 'custom-tool') {
+ .map((tool: unknown) => {
+ const t = tool as AgentTool
+ if (t.type === 'custom-tool') {
// For reference-only tools, ensure usageControl default
- if (!tool.usageControl) {
- tool.usageControl = 'auto'
+ if (!t.usageControl) {
+ t.usageControl = 'auto'
}
// For inline tools (legacy), also ensure code default
- if (!tool.customToolId && (!tool.code || typeof tool.code !== 'string')) {
- tool.code = ''
+ if (!t.customToolId && (!t.code || typeof t.code !== 'string')) {
+ t.code = ''
}
}
return tool
@@ -132,13 +155,14 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, BlockState>): {
)
}
- toolsSubBlock.value = cleaned
+ // Cast cleaned to the expected SubBlockState value type
+ // The value is a tools array but SubBlockState.value is typed narrowly
+ toolsSubBlock.value = cleaned as unknown as typeof toolsSubBlock.value
// Reassign in case caller uses object identity
sanitizedBlocks[blockId] = { ...block, subBlocks: { ...subBlocks, tools: toolsSubBlock } }
- } catch (err: any) {
- warnings.push(
- `Block ${block?.name || blockId}: tools sanitation failed: ${err?.message || String(err)}`
- )
+ } catch (err: unknown) {
+ const message = err instanceof Error ? err.message : String(err)
+ warnings.push(`Block ${block?.name || blockId}: tools sanitation failed: ${message}`)
}
}
@@ -177,7 +201,7 @@ export function validateWorkflowState(
}
// Validate each block
- const sanitizedBlocks: Record<string, any> = {}
+ const sanitizedBlocks: Record<string, BlockState> = {}
let hasChanges = false
for (const [blockId, block] of Object.entries(workflowState.blocks)) {
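
Editor's note: the validator distinguishes two custom-tool encodings — a reference-only format carrying just `customToolId`, and a legacy inline format carrying the full schema and code. Both shapes, assembled from the diff above:

```ts
// The two custom-tool shapes accepted by the validation above.
const referenceTool = {
  type: 'custom-tool',
  customToolId: 'tool_abc123', // resolved dynamically at execution time
  usageControl: 'auto',
}

const inlineLegacyTool = {
  type: 'custom-tool',
  schema: {
    function: {
      name: 'lookupOrder',
      parameters: { type: 'object', properties: {} },
    },
  },
  code: 'return { found: false }',
  usageControl: 'auto',
}
```
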
diff --git a/apps/sim/lib/workflows/streaming/streaming.ts b/apps/sim/lib/workflows/streaming/streaming.ts
index 6a12d78722..b1fe64b637 100644
--- a/apps/sim/lib/workflows/streaming/streaming.ts
+++ b/apps/sim/lib/workflows/streaming/streaming.ts
@@ -8,7 +8,15 @@ import { encodeSSE } from '@/lib/core/utils/sse'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { processStreamingBlockLogs } from '@/lib/tokenization'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
-import type { ExecutionResult } from '@/executor/types'
+import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
+
+/**
+ * Extended streaming execution type that includes blockId on the execution.
+ * The runtime passes blockId but the base StreamingExecution type doesn't declare it.
+ */
+interface StreamingExecutionWithBlockId extends Omit<StreamingExecution, 'execution'> {
+ execution?: StreamingExecution['execution'] & { blockId?: string }
+}
const logger = createLogger('WorkflowStreaming')
@@ -27,9 +35,9 @@ export interface StreamingResponseOptions {
userId: string
workspaceId?: string | null
isDeployed?: boolean
- variables?: Record<string, any>
+ variables?: Record<string, unknown>
}
- input: any
+ input: unknown
executingUserId: string
streamConfig: StreamingConfig
executionId?: string
@@ -41,7 +49,7 @@ interface StreamingState {
streamCompletionTimes: Map<string, number>
}
-function extractOutputValue(output: any, path: string): any {
+function extractOutputValue(output: unknown, path: string): unknown {
return traverseObjectPath(output, path)
}
@@ -54,11 +62,11 @@ function buildMinimalResult(
selectedOutputs: string[] | undefined,
streamedContent: Map<string, string>,
requestId: string
-): { success: boolean; error?: string; output: Record<string, any> } {
+): { success: boolean; error?: string; output: Record<string, unknown> } {
const minimalResult = {
success: result.success,
error: result.error,
- output: {} as Record<string, any>,
+ output: {} as Record<string, unknown>,
}
if (!selectedOutputs?.length) {
@@ -88,7 +96,7 @@ function buildMinimalResult(
continue
}
- const blockLog = result.logs.find((log: any) => log.blockId === blockId)
+ const blockLog = result.logs.find((log: BlockLog) => log.blockId === blockId)
if (!blockLog?.output) {
continue
}
@@ -99,16 +107,16 @@ function buildMinimalResult(
}
if (!minimalResult.output[blockId]) {
- minimalResult.output[blockId] = Object.create(null)
+ minimalResult.output[blockId] = Object.create(null) as Record<string, unknown>
}
- minimalResult.output[blockId][path] = value
+ ;(minimalResult.output[blockId] as Record<string, unknown>)[path] = value
}
return minimalResult
}
-function updateLogsWithStreamedContent(logs: any[], state: StreamingState): any[] {
- return logs.map((log: any) => {
+function updateLogsWithStreamedContent(logs: BlockLog[], state: StreamingState): BlockLog[] {
+ return logs.map((log: BlockLog) => {
if (!state.streamedContent.has(log.blockId)) {
return log
}
@@ -168,10 +176,10 @@ export async function createStreamingResponse(
state.processedOutputs.add(blockId)
}
- const onStreamCallback = async (streamingExec: {
- stream: ReadableStream
- execution?: { blockId?: string }
- }) => {
+ /**
+ * Callback for handling streaming execution events.
+ */
+ const onStreamCallback = async (streamingExec: StreamingExecutionWithBlockId) => {
const blockId = streamingExec.execution?.blockId
if (!blockId) {
logger.warn(`[${requestId}] Streaming execution missing blockId`)
@@ -215,7 +223,7 @@ export async function createStreamingResponse(
}
}
- const onBlockCompleteCallback = async (blockId: string, output: any) => {
+ const onBlockCompleteCallback = async (blockId: string, output: unknown) => {
if (!streamConfig.selectedOutputs?.length) {
return
}
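
Editor's note: `StreamingExecutionWithBlockId` above uses the omit-and-intersect pattern to widen a single nested field without redeclaring the rest of the type. The pattern in miniature:

```ts
// Widening one nested field: omit it, then re-add an intersected version.
interface Base {
  stream: ReadableStream
  execution?: { id: string }
}

type WithBlockId = Omit<Base, 'execution'> & {
  execution?: Base['execution'] & { blockId?: string }
}

const exec: WithBlockId = {
  stream: new ReadableStream(),
  execution: { id: 'run-1', blockId: 'block-7' },
}
```
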
diff --git a/apps/sim/lib/workflows/training/compute-edit-sequence.ts b/apps/sim/lib/workflows/training/compute-edit-sequence.ts
index b50ce49211..da9798d560 100644
--- a/apps/sim/lib/workflows/training/compute-edit-sequence.ts
+++ b/apps/sim/lib/workflows/training/compute-edit-sequence.ts
@@ -1,4 +1,7 @@
-import type { CopilotWorkflowState } from '@/lib/workflows/sanitization/json-sanitizer'
+import type {
+ CopilotBlockState,
+ CopilotWorkflowState,
+} from '@/lib/workflows/sanitization/json-sanitizer'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
export interface EditOperation {
@@ -7,13 +10,12 @@ export interface EditOperation {
params?: {
type?: string
name?: string
- outputs?: Record<string, any>
enabled?: boolean
triggerMode?: boolean
advancedMode?: boolean
- inputs?: Record<string, any>
- connections?: Record<string, any>
- nestedNodes?: Record<string, any>
+ inputs?: Record<string, unknown>
+ connections?: Record<string, string | string[]>
+ nestedNodes?: Record<string, CopilotBlockState>
subflowId?: string
}
}
@@ -34,11 +36,11 @@ export interface WorkflowDiff {
* Returns map of blockId -> {block, parentId}
*/
function flattenBlocks(
- blocks: Record<string, any>
-): Record<string, { block: any; parentId?: string }> {
- const flattened: Record<string, { block: any; parentId?: string }> = {}
+ blocks: Record<string, CopilotBlockState>
+): Record<string, { block: CopilotBlockState; parentId?: string }> {
+ const flattened: Record<string, { block: CopilotBlockState; parentId?: string }> = {}
- const processBlock = (blockId: string, block: any, parentId?: string) => {
+ const processBlock = (blockId: string, block: CopilotBlockState, parentId?: string) => {
flattened[blockId] = { block, parentId }
// Recursively process nested nodes
@@ -56,23 +58,20 @@ function flattenBlocks(
return flattened
}
-/**
- * Extract all edges from blocks with embedded connections (including nested)
- */
-function extractAllEdgesFromBlocks(blocks: Record<string, any>): Array<{
+interface ExtractedEdge {
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
-}> {
- const edges: Array<{
- source: string
- target: string
- sourceHandle?: string | null
- targetHandle?: string | null
- }> = []
-
- const processBlockConnections = (block: any, blockId: string) => {
+}
+
+/**
+ * Extract all edges from blocks with embedded connections (including nested)
+ */
+function extractAllEdgesFromBlocks(blocks: Record<string, CopilotBlockState>): ExtractedEdge[] {
+ const edges: ExtractedEdge[] = []
+
+ const processBlockConnections = (block: CopilotBlockState, blockId: string) => {
if (block.connections) {
Object.entries(block.connections).forEach(([sourceHandle, targets]) => {
const targetArray = Array.isArray(targets) ? targets : [targets]
@@ -191,7 +190,6 @@ export function computeEditSequence(
subflowId: parentId,
type: block.type,
name: block.name,
- outputs: block.outputs,
enabled: block.enabled !== undefined ? block.enabled : true,
}
@@ -296,7 +294,6 @@ export function computeEditSequence(
subflowId: endParentId,
type: endBlock.type,
name: endBlock.name,
- outputs: endBlock.outputs,
enabled: endBlock.enabled !== undefined ? endBlock.enabled : true,
}
@@ -359,33 +356,22 @@ export function computeEditSequence(
* Extract input values from a block
* Works with sanitized format where inputs is Record |