From db199752a23736c2a1d245fc92c44ac34dead35f Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 13:47:27 -0800 Subject: [PATCH 01/14] fix(terminal): reconnect to running executions after page refresh --- .../app/api/workflows/[id]/execute/route.ts | 39 ++- .../executions/[executionId]/stream/route.ts | 163 +++++++++++ .../hooks/use-workflow-execution.ts | 269 ++++++++++++++++-- apps/sim/hooks/use-execution-stream.ts | 120 +++++--- apps/sim/lib/execution/event-buffer.ts | 228 +++++++++++++++ apps/sim/stores/execution/store.ts | 12 + apps/sim/stores/execution/types.ts | 7 + apps/sim/stores/terminal/console/store.ts | 25 +- apps/sim/stores/terminal/console/types.ts | 1 + 9 files changed, 784 insertions(+), 80 deletions(-) create mode 100644 apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts create mode 100644 apps/sim/lib/execution/event-buffer.ts diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 13fc0ff41f..ec857deb38 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -12,7 +12,7 @@ import { import { generateRequestId } from '@/lib/core/utils/request' import { SSE_HEADERS } from '@/lib/core/utils/sse' import { getBaseUrl } from '@/lib/core/utils/urls' -import { markExecutionCancelled } from '@/lib/execution/cancellation' +import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer' import { processInputFileFields } from '@/lib/execution/files' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' @@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync) let isStreamClosed = false + const eventWriter = createExecutionEventWriter(executionId) + setExecutionMeta(executionId, { + status: 'active', + userId: actorUserId, + workflowId, + }).catch(() => {}) + const stream = new ReadableStream({ async start(controller) { - const sendEvent = (event: ExecutionEvent) => { - if (isStreamClosed) return + let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null - try { - controller.enqueue(encodeSSEEvent(event)) - } catch { - isStreamClosed = true + const sendEvent = (event: ExecutionEvent) => { + if (!isStreamClosed) { + try { + controller.enqueue(encodeSSEEvent(event)) + } catch { + isStreamClosed = true + } + } + if (event.type !== 'stream:chunk' && event.type !== 'stream:done') { + eventWriter.write(event as unknown as Record).catch(() => {}) } } @@ -951,6 +963,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: duration: result.metadata?.duration || 0, }, }) + finalMetaStatus = 'error' } else { logger.info(`[${requestId}] Workflow execution was cancelled`) @@ -963,6 +976,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: duration: result.metadata?.duration || 0, }, }) + finalMetaStatus = 'cancelled' } return } @@ -986,6 +1000,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: endTime: result.metadata?.endTime || new Date().toISOString(), }, }) + finalMetaStatus = 'complete' } catch (error: unknown) { const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut() const errorMessage = isTimeout @@ -1017,7 +1032,12 @@ export async function POST(req: 
NextRequest, { params }: { params: Promise<{ id: duration: executionResult?.metadata?.duration || 0, }, }) + finalMetaStatus = 'error' } finally { + await eventWriter.close() + if (finalMetaStatus) { + setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {}) + } timeoutController.cleanup() if (executionId) { await cleanupExecutionBase64Cache(executionId) @@ -1032,10 +1052,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: }, cancel() { isStreamClosed = true - timeoutController.cleanup() - logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`) - timeoutController.abort() - markExecutionCancelled(executionId).catch(() => {}) + logger.info(`[${requestId}] Client disconnected from SSE stream`) }, }) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts new file mode 100644 index 0000000000..254831c105 --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -0,0 +1,163 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { SSE_HEADERS } from '@/lib/core/utils/sse' +import { + type ExecutionStreamStatus, + getExecutionMeta, + readExecutionEvents, +} from '@/lib/execution/event-buffer' +import { formatSSEEvent } from '@/lib/workflows/executor/execution-events' +import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' + +const logger = createLogger('ExecutionStreamReconnectAPI') + +const POLL_INTERVAL_MS = 500 + +function isTerminalStatus(status: ExecutionStreamStatus): boolean { + return status === 'complete' || status === 'error' || status === 'cancelled' +} + +export const runtime = 'nodejs' +export const dynamic = 'force-dynamic' + +export async function GET( + req: NextRequest, + { params }: { params: Promise<{ id: string; executionId: string }> } +) { + const { id: workflowId, executionId } = await params + + try { + const auth = await checkHybridAuth(req, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({ + workflowId, + userId: auth.userId, + action: 'read', + }) + if (!workflowAuthorization.allowed) { + return NextResponse.json( + { error: workflowAuthorization.message || 'Access denied' }, + { status: workflowAuthorization.status } + ) + } + + const meta = await getExecutionMeta(executionId) + if (!meta) { + return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 }) + } + + if (meta.workflowId && meta.workflowId !== workflowId) { + return NextResponse.json( + { error: 'Execution does not belong to this workflow' }, + { status: 403 } + ) + } + + const fromParam = req.nextUrl.searchParams.get('from') + const fromEventId = fromParam ? 
Number.parseInt(fromParam, 10) : 0 + + logger.info('Reconnection stream requested', { + workflowId, + executionId, + fromEventId, + metaStatus: meta.status, + }) + + const encoder = new TextEncoder() + + const stream = new ReadableStream({ + async start(controller) { + let lastEventId = fromEventId + let closed = false + + const enqueue = (text: string) => { + if (closed) return + try { + controller.enqueue(encoder.encode(text)) + } catch { + closed = true + } + } + + try { + // Replay buffered events + const events = await readExecutionEvents(executionId, lastEventId) + for (const entry of events) { + if (closed) return + enqueue(formatSSEEvent(entry.event as any)) + lastEventId = entry.eventId + } + + // Check if execution is already done + const currentMeta = await getExecutionMeta(executionId) + if (!currentMeta || isTerminalStatus(currentMeta.status)) { + enqueue('data: [DONE]\n\n') + if (!closed) controller.close() + return + } + + // Poll for new events until execution completes + while (!closed) { + await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS)) + if (closed) return + + const newEvents = await readExecutionEvents(executionId, lastEventId) + for (const entry of newEvents) { + if (closed) return + enqueue(formatSSEEvent(entry.event as any)) + lastEventId = entry.eventId + } + + const polledMeta = await getExecutionMeta(executionId) + if (!polledMeta || isTerminalStatus(polledMeta.status)) { + // One final read to catch any events flushed alongside the meta update + const finalEvents = await readExecutionEvents(executionId, lastEventId) + for (const entry of finalEvents) { + if (closed) return + enqueue(formatSSEEvent(entry.event as any)) + lastEventId = entry.eventId + } + enqueue('data: [DONE]\n\n') + if (!closed) controller.close() + return + } + } + } catch (error) { + logger.error('Error in reconnection stream', { + executionId, + error: error instanceof Error ? 
error.message : String(error), + }) + if (!closed) { + try { + controller.close() + } catch {} + } + } + }, + cancel() { + logger.info('Client disconnected from reconnection stream', { executionId }) + }, + }) + + return new NextResponse(stream, { + headers: { + ...SSE_HEADERS, + 'X-Execution-Id': executionId, + }, + }) + } catch (error: any) { + logger.error('Failed to start reconnection stream', { + workflowId, + executionId, + error: error.message, + }) + return NextResponse.json( + { error: error.message || 'Failed to start reconnection stream' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index 16c0e81f18..9df2754648 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -1,4 +1,4 @@ -import { useCallback, useRef, useState } from 'react' +import { useCallback, useEffect, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useQueryClient } from '@tanstack/react-query' import { v4 as uuidv4 } from 'uuid' @@ -54,7 +54,7 @@ interface DebugValidationResult { interface BlockEventHandlerConfig { workflowId?: string - executionId?: string + executionIdRef: { current: string } workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }> activeBlocksSet: Set accumulatedBlockLogs: BlockLog[] @@ -108,12 +108,15 @@ export function useWorkflowExecution() { const queryClient = useQueryClient() const currentWorkflow = useCurrentWorkflow() const { activeWorkflowId, workflows } = useWorkflowRegistry() - const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } = + const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } = useTerminalConsoleStore() + const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated) const { getAllVariables } = useEnvironmentStore() const { getVariablesByWorkflowId, variables } = useVariablesStore() const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } = useCurrentWorkflowExecution() + const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId) + const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId) const setIsExecuting = useExecutionStore((s) => s.setIsExecuting) const setIsDebugging = useExecutionStore((s) => s.setIsDebugging) const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks) @@ -297,7 +300,7 @@ export function useWorkflowExecution() { (config: BlockEventHandlerConfig) => { const { workflowId, - executionId, + executionIdRef, workflowEdges, activeBlocksSet, accumulatedBlockLogs, @@ -308,6 +311,14 @@ export function useWorkflowExecution() { onBlockCompleteCallback, } = config + /** Returns true if this execution was cancelled or superseded by another run. 
*/ + const isStaleExecution = () => + !!( + workflowId && + executionIdRef.current && + useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current + ) + const updateActiveBlocks = (blockId: string, isActive: boolean) => { if (!workflowId) return if (isActive) { @@ -360,7 +371,7 @@ export function useWorkflowExecution() { endedAt: data.endedAt, workflowId, blockId: data.blockId, - executionId, + executionId: executionIdRef.current, blockName: data.blockName || 'Unknown Block', blockType: data.blockType || 'unknown', iterationCurrent: data.iterationCurrent, @@ -383,7 +394,7 @@ export function useWorkflowExecution() { endedAt: data.endedAt, workflowId, blockId: data.blockId, - executionId, + executionId: executionIdRef.current, blockName: data.blockName || 'Unknown Block', blockType: data.blockType || 'unknown', iterationCurrent: data.iterationCurrent, @@ -410,7 +421,7 @@ export function useWorkflowExecution() { iterationType: data.iterationType, iterationContainerId: data.iterationContainerId, }, - executionId + executionIdRef.current ) } @@ -432,11 +443,12 @@ export function useWorkflowExecution() { iterationType: data.iterationType, iterationContainerId: data.iterationContainerId, }, - executionId + executionIdRef.current ) } const onBlockStarted = (data: BlockStartedData) => { + if (isStaleExecution()) return updateActiveBlocks(data.blockId, true) markIncomingEdges(data.blockId) @@ -453,7 +465,7 @@ export function useWorkflowExecution() { endedAt: undefined, workflowId, blockId: data.blockId, - executionId, + executionId: executionIdRef.current, blockName: data.blockName || 'Unknown Block', blockType: data.blockType || 'unknown', isRunning: true, @@ -465,6 +477,7 @@ export function useWorkflowExecution() { } const onBlockCompleted = (data: BlockCompletedData) => { + if (isStaleExecution()) return updateActiveBlocks(data.blockId, false) if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success') @@ -495,6 +508,7 @@ export function useWorkflowExecution() { } const onBlockError = (data: BlockErrorData) => { + if (isStaleExecution()) return updateActiveBlocks(data.blockId, false) if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error') @@ -1275,7 +1289,7 @@ export function useWorkflowExecution() { if (activeWorkflowId) { logger.info('Using server-side executor') - const executionId = uuidv4() + const executionIdRef = { current: '' } let executionResult: ExecutionResult = { success: false, @@ -1293,7 +1307,7 @@ export function useWorkflowExecution() { try { const blockHandlers = buildBlockEventHandlers({ workflowId: activeWorkflowId, - executionId, + executionIdRef, workflowEdges, activeBlocksSet, accumulatedBlockLogs, @@ -1326,6 +1340,10 @@ export function useWorkflowExecution() { loops: clientWorkflowState.loops, parallels: clientWorkflowState.parallels, }, + onExecutionId: (id) => { + executionIdRef.current = id + setCurrentExecutionId(activeWorkflowId, id) + }, callbacks: { onExecutionStarted: (data) => { logger.info('Server execution started:', data) @@ -1368,6 +1386,17 @@ export function useWorkflowExecution() { }, onExecutionCompleted: (data) => { + if ( + activeWorkflowId && + useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== + executionIdRef.current + ) + return + + if (activeWorkflowId) { + setCurrentExecutionId(activeWorkflowId, null) + } + executionResult = { success: data.success, output: data.output, @@ -1428,6 +1457,17 @@ export function useWorkflowExecution() { }, onExecutionError: (data) => { + if ( + 
activeWorkflowId && + useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== + executionIdRef.current + ) + return + + if (activeWorkflowId) { + setCurrentExecutionId(activeWorkflowId, null) + } + executionResult = { success: false, output: {}, @@ -1441,7 +1481,7 @@ export function useWorkflowExecution() { const isPreExecutionError = accumulatedBlockLogs.length === 0 handleExecutionErrorConsole({ workflowId: activeWorkflowId, - executionId, + executionId: executionIdRef.current, error: data.error, durationMs: data.duration, blockLogs: accumulatedBlockLogs, @@ -1450,9 +1490,20 @@ export function useWorkflowExecution() { }, onExecutionCancelled: (data) => { + if ( + activeWorkflowId && + useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== + executionIdRef.current + ) + return + + if (activeWorkflowId) { + setCurrentExecutionId(activeWorkflowId, null) + } + handleExecutionCancelledConsole({ workflowId: activeWorkflowId, - executionId, + executionId: executionIdRef.current, durationMs: data?.duration, }) }, @@ -1461,7 +1512,8 @@ export function useWorkflowExecution() { return executionResult } catch (error: any) { - // Don't log abort errors - they're intentional user actions + // Disconnect errors (AbortError, network error) are swallowed by useExecutionStream + // and won't reach here. Only genuine execution failures propagate. if (error.name === 'AbortError' || error.message?.includes('aborted')) { logger.info('Execution aborted by user') @@ -1719,23 +1771,40 @@ export function useWorkflowExecution() { const handleCancelExecution = useCallback(() => { logger.info('Workflow execution cancellation requested') - // Cancel the execution stream for this workflow (server-side) + // 1. Read + clear execution ID first so the isStaleExecution guard + // blocks any further SSE callbacks from the old execution. + const storedExecutionId = activeWorkflowId ? getCurrentExecutionId(activeWorkflowId) : null + if (activeWorkflowId) { + setCurrentExecutionId(activeWorkflowId, null) + } + + // 2. Send cancel signal to server via stored executionId + if (activeWorkflowId && storedExecutionId) { + fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, { + method: 'POST', + }).catch(() => {}) + } + + // 3. Abort local SSE stream (if still connected) executionStream.cancel(activeWorkflowId ?? undefined) - // Mark current chat execution as superseded so its cleanup won't affect new executions + // 4. Update terminal: mark running entries as cancelled + add "Execution Cancelled" entry + if (activeWorkflowId) { + handleExecutionCancelledConsole({ + workflowId: activeWorkflowId, + executionId: storedExecutionId ?? undefined, + }) + } + currentChatExecutionIdRef.current = null - // Mark all running entries as canceled in the terminal + // 5. 
Reset remaining execution state if (activeWorkflowId) { - cancelRunningEntries(activeWorkflowId) - - // Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx setIsExecuting(activeWorkflowId, false) setIsDebugging(activeWorkflowId, false) setActiveBlocks(activeWorkflowId, new Set()) } - // If in debug mode, also reset debug state if (isDebugging) { resetDebugState() } @@ -1747,7 +1816,9 @@ export function useWorkflowExecution() { setIsDebugging, setActiveBlocks, activeWorkflowId, - cancelRunningEntries, + getCurrentExecutionId, + setCurrentExecutionId, + handleExecutionCancelledConsole, ]) /** @@ -1847,7 +1918,7 @@ export function useWorkflowExecution() { } setIsExecuting(workflowId, true) - const executionId = uuidv4() + const executionIdRef = { current: '' } const accumulatedBlockLogs: BlockLog[] = [] const accumulatedBlockStates = new Map() const executedBlockIds = new Set() @@ -1856,7 +1927,7 @@ export function useWorkflowExecution() { try { const blockHandlers = buildBlockEventHandlers({ workflowId, - executionId, + executionIdRef, workflowEdges, activeBlocksSet, accumulatedBlockLogs, @@ -1871,6 +1942,10 @@ export function useWorkflowExecution() { startBlockId: blockId, sourceSnapshot: effectiveSnapshot, input: workflowInput, + onExecutionId: (id) => { + executionIdRef.current = id + setCurrentExecutionId(workflowId, id) + }, callbacks: { onBlockStarted: blockHandlers.onBlockStarted, onBlockCompleted: blockHandlers.onBlockCompleted, @@ -1921,7 +1996,7 @@ export function useWorkflowExecution() { handleExecutionErrorConsole({ workflowId, - executionId, + executionId: executionIdRef.current, error: data.error, durationMs: data.duration, blockLogs: accumulatedBlockLogs, @@ -1931,7 +2006,7 @@ export function useWorkflowExecution() { onExecutionCancelled: (data) => { handleExecutionCancelledConsole({ workflowId, - executionId, + executionId: executionIdRef.current, durationMs: data?.duration, }) }, @@ -1942,6 +2017,7 @@ export function useWorkflowExecution() { logger.error('Run-from-block failed:', error) } } finally { + setCurrentExecutionId(workflowId, null) setIsExecuting(workflowId, false) setActiveBlocks(workflowId, new Set()) } @@ -1950,6 +2026,7 @@ export function useWorkflowExecution() { getLastExecutionSnapshot, setLastExecutionSnapshot, clearLastExecutionSnapshot, + setCurrentExecutionId, setIsExecuting, setActiveBlocks, setBlockRunStatus, @@ -2002,6 +2079,146 @@ export function useWorkflowExecution() { [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks] ) + useEffect(() => { + if (!activeWorkflowId || !hasHydrated) return + + // Only attempt reconnection after a full page refresh. + // The execution store is NOT persisted, so isExecuting resets to false on refresh. + // During SPA navigation, isExecuting is still true → the SSE connection is alive. + const workflowExecState = useExecutionStore.getState().workflowExecutions.get(activeWorkflowId) + if (workflowExecState?.isExecuting) return + + const entries = useTerminalConsoleStore.getState().entries + const runningEntries = entries.filter( + (e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId + ) + if (runningEntries.length === 0) return + + // Pick the most recent execution by startedAt timestamp. + // Old zombie entries from previous executions may still have isRunning=true. + const sorted = [...runningEntries].sort((a, b) => { + const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0 + const bTime = b.startedAt ? 
new Date(b.startedAt).getTime() : 0 + return bTime - aTime + }) + const executionId = sorted[0].executionId! + + // Mark entries from older executions as stale + const otherExecutionIds = new Set( + sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!) + ) + if (otherExecutionIds.size > 0) { + cancelRunningEntries(activeWorkflowId) + } + + setCurrentExecutionId(activeWorkflowId, executionId) + setIsExecuting(activeWorkflowId, true) + + const workflowEdges = useWorkflowStore.getState().edges + const activeBlocksSet = new Set() + const accumulatedBlockLogs: BlockLog[] = [] + const accumulatedBlockStates = new Map() + const executedBlockIds = new Set() + + const executionIdRef = { current: executionId } + + const handlers = buildBlockEventHandlers({ + workflowId: activeWorkflowId, + executionIdRef, + workflowEdges, + activeBlocksSet, + accumulatedBlockLogs, + accumulatedBlockStates, + executedBlockIds, + consoleMode: 'update', + includeStartConsoleEntry: true, + }) + + // Defer clearing old entries until the first reconnection event arrives. + // This keeps hydrated entries visible during the network round-trip, + // avoiding a flash of empty console. + let cleared = false + const clearOnce = () => { + if (!cleared) { + cleared = true + clearExecutionEntries(executionId) + } + } + + const reconnectWorkflowId = activeWorkflowId + + executionStream + .reconnect({ + workflowId: reconnectWorkflowId, + executionId, + callbacks: { + onBlockStarted: (data) => { + clearOnce() + handlers.onBlockStarted(data) + }, + onBlockCompleted: (data) => { + clearOnce() + handlers.onBlockCompleted(data) + }, + onBlockError: (data) => { + clearOnce() + handlers.onBlockError(data) + }, + onExecutionCompleted: () => { + clearOnce() + setCurrentExecutionId(reconnectWorkflowId, null) + setIsExecuting(reconnectWorkflowId, false) + setActiveBlocks(reconnectWorkflowId, new Set()) + }, + onExecutionError: (data) => { + clearOnce() + setCurrentExecutionId(reconnectWorkflowId, null) + setIsExecuting(reconnectWorkflowId, false) + setActiveBlocks(reconnectWorkflowId, new Set()) + handleExecutionErrorConsole({ + workflowId: reconnectWorkflowId, + executionId, + error: data.error, + blockLogs: accumulatedBlockLogs, + }) + }, + onExecutionCancelled: () => { + clearOnce() + setCurrentExecutionId(reconnectWorkflowId, null) + setIsExecuting(reconnectWorkflowId, false) + setActiveBlocks(reconnectWorkflowId, new Set()) + handleExecutionCancelledConsole({ + workflowId: reconnectWorkflowId, + executionId, + }) + }, + }, + }) + .catch((error) => { + logger.warn('Execution reconnection failed', { executionId, error }) + for (const entry of runningEntries.filter((e) => e.executionId === executionId)) { + addConsole({ + workflowId: entry.workflowId, + blockId: entry.blockId, + blockName: entry.blockName, + blockType: entry.blockType, + executionId: entry.executionId, + executionOrder: entry.executionOrder, + isRunning: false, + warning: 'Execution result unavailable — check the logs page', + }) + } + setCurrentExecutionId(reconnectWorkflowId, null) + setIsExecuting(reconnectWorkflowId, false) + setActiveBlocks(reconnectWorkflowId, new Set()) + }) + + return () => { + executionStream.cancel(reconnectWorkflowId) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [activeWorkflowId, hasHydrated]) + return { isExecuting, isDebugging, diff --git a/apps/sim/hooks/use-execution-stream.ts b/apps/sim/hooks/use-execution-stream.ts index e664788b5f..d2fbf6fadb 100644 --- a/apps/sim/hooks/use-execution-stream.ts 
+++ b/apps/sim/hooks/use-execution-stream.ts @@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types' const logger = createLogger('useExecutionStream') +/** + * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close). + * These should be treated as clean disconnects, not execution errors. + */ +function isClientDisconnectError(error: any): boolean { + if (error.name === 'AbortError') return true + const msg = (error.message ?? '').toLowerCase() + return ( + msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed') + ) +} + /** * Processes SSE events from a response body and invokes appropriate callbacks. */ @@ -121,6 +133,7 @@ export interface ExecuteStreamOptions { parallels?: Record } stopAfterBlockId?: string + onExecutionId?: (executionId: string) => void callbacks?: ExecutionStreamCallbacks } @@ -129,6 +142,14 @@ export interface ExecuteFromBlockOptions { startBlockId: string sourceSnapshot: SerializableExecutionState input?: any + onExecutionId?: (executionId: string) => void + callbacks?: ExecutionStreamCallbacks +} + +export interface ReconnectStreamOptions { + workflowId: string + executionId: string + fromEventId?: number callbacks?: ExecutionStreamCallbacks } @@ -143,7 +164,7 @@ export function useExecutionStream() { ) const execute = useCallback(async (options: ExecuteStreamOptions) => { - const { workflowId, callbacks = {}, ...payload } = options + const { workflowId, callbacks = {}, onExecutionId, ...payload } = options const existing = abortControllersRef.current.get(workflowId) if (existing) { @@ -177,24 +198,24 @@ export function useExecutionStream() { throw new Error('No response body') } - const executionId = response.headers.get('X-Execution-Id') - if (executionId) { - currentExecutionsRef.current.set(workflowId, { workflowId, executionId }) + const serverExecutionId = response.headers.get('X-Execution-Id') + if (serverExecutionId) { + currentExecutionsRef.current.set(workflowId, { workflowId, executionId: serverExecutionId }) + onExecutionId?.(serverExecutionId) } const reader = response.body.getReader() await processSSEStream(reader, callbacks, 'Execution') } catch (error: any) { - if (error.name === 'AbortError') { - logger.info('Execution stream cancelled') - callbacks.onExecutionCancelled?.({ duration: 0 }) - } else { - logger.error('Execution stream error:', error) - callbacks.onExecutionError?.({ - error: error.message || 'Unknown error', - duration: 0, - }) + if (isClientDisconnectError(error)) { + logger.info('Execution stream disconnected (page unload or abort)') + return } + logger.error('Execution stream error:', error) + callbacks.onExecutionError?.({ + error: error.message || 'Unknown error', + duration: 0, + }) throw error } finally { abortControllersRef.current.delete(workflowId) @@ -203,7 +224,14 @@ export function useExecutionStream() { }, []) const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => { - const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options + const { + workflowId, + startBlockId, + sourceSnapshot, + input, + onExecutionId, + callbacks = {}, + } = options const existing = abortControllersRef.current.get(workflowId) if (existing) { @@ -246,24 +274,50 @@ export function useExecutionStream() { throw new Error('No response body') } - const executionId = response.headers.get('X-Execution-Id') - if (executionId) { - currentExecutionsRef.current.set(workflowId, { workflowId, 
executionId }) + const serverExecutionId = response.headers.get('X-Execution-Id') + if (serverExecutionId) { + currentExecutionsRef.current.set(workflowId, { workflowId, executionId: serverExecutionId }) + onExecutionId?.(serverExecutionId) } const reader = response.body.getReader() await processSSEStream(reader, callbacks, 'Run-from-block') } catch (error: any) { - if (error.name === 'AbortError') { - logger.info('Run-from-block execution cancelled') - callbacks.onExecutionCancelled?.({ duration: 0 }) - } else { - logger.error('Run-from-block execution error:', error) - callbacks.onExecutionError?.({ - error: error.message || 'Unknown error', - duration: 0, - }) + if (isClientDisconnectError(error)) { + logger.info('Run-from-block stream disconnected (page unload or abort)') + return } + logger.error('Run-from-block execution error:', error) + callbacks.onExecutionError?.({ + error: error.message || 'Unknown error', + duration: 0, + }) + throw error + } finally { + abortControllersRef.current.delete(workflowId) + currentExecutionsRef.current.delete(workflowId) + } + }, []) + + const reconnect = useCallback(async (options: ReconnectStreamOptions) => { + const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options + + const abortController = new AbortController() + abortControllersRef.current.set(workflowId, abortController) + currentExecutionsRef.current.set(workflowId, { workflowId, executionId }) + + try { + const response = await fetch( + `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`, + { signal: abortController.signal } + ) + if (!response.ok) throw new Error(`Reconnect failed (${response.status})`) + if (!response.body) throw new Error('No response body') + + await processSSEStream(response.body.getReader(), callbacks, 'Reconnect') + } catch (error: any) { + if (isClientDisconnectError(error)) return + logger.error('Reconnection stream error:', error) throw error } finally { abortControllersRef.current.delete(workflowId) @@ -273,13 +327,6 @@ export function useExecutionStream() { const cancel = useCallback((workflowId?: string) => { if (workflowId) { - const execution = currentExecutionsRef.current.get(workflowId) - if (execution) { - fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, { - method: 'POST', - }).catch(() => {}) - } - const controller = abortControllersRef.current.get(workflowId) if (controller) { controller.abort() @@ -287,12 +334,6 @@ export function useExecutionStream() { } currentExecutionsRef.current.delete(workflowId) } else { - for (const [, execution] of currentExecutionsRef.current) { - fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, { - method: 'POST', - }).catch(() => {}) - } - for (const [, controller] of abortControllersRef.current) { controller.abort() } @@ -304,6 +345,7 @@ export function useExecutionStream() { return { execute, executeFromBlock, + reconnect, cancel, } } diff --git a/apps/sim/lib/execution/event-buffer.ts b/apps/sim/lib/execution/event-buffer.ts new file mode 100644 index 0000000000..14f2685d26 --- /dev/null +++ b/apps/sim/lib/execution/event-buffer.ts @@ -0,0 +1,228 @@ +import { createLogger } from '@sim/logger' +import { getRedisClient } from '@/lib/core/config/redis' + +const logger = createLogger('ExecutionEventBuffer') + +const REDIS_PREFIX = 'execution:stream:' +const TTL_SECONDS = 60 * 60 // 1 hour +const EVENT_LIMIT = 1000 +const RESERVE_BATCH = 100 +const FLUSH_INTERVAL_MS = 15 +const 
FLUSH_MAX_BATCH = 200 + +function getEventsKey(executionId: string) { + return `${REDIS_PREFIX}${executionId}:events` +} + +function getSeqKey(executionId: string) { + return `${REDIS_PREFIX}${executionId}:seq` +} + +function getMetaKey(executionId: string) { + return `${REDIS_PREFIX}${executionId}:meta` +} + +export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled' + +export interface ExecutionStreamMeta { + status: ExecutionStreamStatus + userId?: string + workflowId?: string + updatedAt?: string +} + +export interface ExecutionEventEntry { + eventId: number + executionId: string + event: Record +} + +export interface ExecutionEventWriter { + write: (event: Record) => Promise + flush: () => Promise + close: () => Promise +} + +export async function setExecutionMeta( + executionId: string, + meta: Partial +): Promise { + const redis = getRedisClient() + if (!redis) { + logger.warn('setExecutionMeta: Redis client unavailable', { executionId }) + return + } + try { + const key = getMetaKey(executionId) + const payload: Record = { + updatedAt: new Date().toISOString(), + } + if (meta.status) payload.status = meta.status + if (meta.userId) payload.userId = meta.userId + if (meta.workflowId) payload.workflowId = meta.workflowId + await redis.hset(key, payload) + await redis.expire(key, TTL_SECONDS) + } catch (error) { + logger.warn('Failed to update execution meta', { + executionId, + error: error instanceof Error ? error.message : String(error), + }) + } +} + +export async function getExecutionMeta(executionId: string): Promise { + const redis = getRedisClient() + if (!redis) { + logger.warn('getExecutionMeta: Redis client unavailable', { executionId }) + return null + } + try { + const key = getMetaKey(executionId) + const meta = await redis.hgetall(key) + if (!meta || Object.keys(meta).length === 0) return null + return meta as unknown as ExecutionStreamMeta + } catch (error) { + logger.warn('Failed to read execution meta', { + executionId, + error: error instanceof Error ? error.message : String(error), + }) + return null + } +} + +export async function readExecutionEvents( + executionId: string, + afterEventId: number +): Promise { + const redis = getRedisClient() + if (!redis) return [] + try { + const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf') + return raw + .map((entry) => { + try { + return JSON.parse(entry) as ExecutionEventEntry + } catch { + return null + } + }) + .filter((entry): entry is ExecutionEventEntry => Boolean(entry)) + } catch (error) { + logger.warn('Failed to read execution events', { + executionId, + error: error instanceof Error ? 
error.message : String(error), + }) + return [] + } +} + +export function createExecutionEventWriter(executionId: string): ExecutionEventWriter { + const redis = getRedisClient() + if (!redis) { + logger.warn( + 'createExecutionEventWriter: Redis client unavailable, events will not be buffered', + { + executionId, + } + ) + return { + write: async (event) => ({ eventId: 0, executionId, event }), + flush: async () => {}, + close: async () => {}, + } + } + + let pending: ExecutionEventEntry[] = [] + let nextEventId = 0 + let maxReservedId = 0 + let flushTimer: ReturnType | null = null + + const scheduleFlush = () => { + if (flushTimer) return + flushTimer = setTimeout(() => { + flushTimer = null + void flush() + }, FLUSH_INTERVAL_MS) + } + + const reserveIds = async (minCount: number) => { + const reserveCount = Math.max(RESERVE_BATCH, minCount) + const newMax = await redis.incrby(getSeqKey(executionId), reserveCount) + const startId = newMax - reserveCount + 1 + if (nextEventId === 0 || nextEventId > maxReservedId) { + nextEventId = startId + maxReservedId = newMax + } + } + + let flushPromise: Promise | null = null + let closed = false + + const doFlush = async () => { + if (pending.length === 0) return + const batch = pending + pending = [] + try { + const key = getEventsKey(executionId) + const zaddArgs: (string | number)[] = [] + for (const entry of batch) { + zaddArgs.push(entry.eventId, JSON.stringify(entry)) + } + const pipeline = redis.pipeline() + pipeline.zadd(key, ...(zaddArgs as [number, string])) + pipeline.expire(key, TTL_SECONDS) + pipeline.expire(getSeqKey(executionId), TTL_SECONDS) + pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1) + await pipeline.exec() + } catch (error) { + logger.warn('Failed to flush execution events', { + executionId, + batchSize: batch.length, + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + }) + pending = batch.concat(pending) + } + } + + const flush = async () => { + if (flushPromise) { + await flushPromise + return + } + flushPromise = doFlush() + try { + await flushPromise + } finally { + flushPromise = null + if (pending.length > 0) scheduleFlush() + } + } + + const write = async (event: Record) => { + if (closed) return { eventId: 0, executionId, event } + if (nextEventId === 0 || nextEventId > maxReservedId) { + await reserveIds(1) + } + const eventId = nextEventId++ + const entry: ExecutionEventEntry = { eventId, executionId, event } + pending.push(entry) + if (pending.length >= FLUSH_MAX_BATCH) { + await flush() + } else { + scheduleFlush() + } + return entry + } + + const close = async () => { + closed = true + if (flushTimer) { + clearTimeout(flushTimer) + flushTimer = null + } + await flush() + } + + return { write, flush, close } +} diff --git a/apps/sim/stores/execution/store.ts b/apps/sim/stores/execution/store.ts index 6983ddcda1..b82d4a3c50 100644 --- a/apps/sim/stores/execution/store.ts +++ b/apps/sim/stores/execution/store.ts @@ -129,6 +129,18 @@ export const useExecutionStore = create()((se }) }, + setCurrentExecutionId: (workflowId, executionId) => { + set({ + workflowExecutions: updatedMap(get().workflowExecutions, workflowId, { + currentExecutionId: executionId, + }), + }) + }, + + getCurrentExecutionId: (workflowId) => { + return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId + }, + clearRunPath: (workflowId) => { set({ workflowExecutions: updatedMap(get().workflowExecutions, workflowId, { diff --git a/apps/sim/stores/execution/types.ts b/apps/sim/stores/execution/types.ts index 55d873b492..b36ea43a19 100644 --- a/apps/sim/stores/execution/types.ts +++ b/apps/sim/stores/execution/types.ts @@ -35,6 +35,8 @@ export interface WorkflowExecutionState { lastRunPath: Map /** Maps edge IDs to their run result from the last execution */ lastRunEdges: Map + /** The execution ID of the currently running execution */ + currentExecutionId: string | null } /** @@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = { debugContext: null, lastRunPath: new Map(), lastRunEdges: new Map(), + currentExecutionId: null, } /** @@ -96,6 +99,10 @@ export interface ExecutionActions { setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void /** Clears the run path and run edges for a workflow */ clearRunPath: (workflowId: string) => void + /** Stores the current execution ID for a workflow */ + setCurrentExecutionId: (workflowId: string, executionId: string | null) => void + /** Returns the current execution ID for a workflow */ + getCurrentExecutionId: (workflowId: string) => string | null /** Resets the entire store to its initial empty state */ reset: () => void /** Stores a serializable execution snapshot for a workflow */ diff --git a/apps/sim/stores/terminal/console/store.ts b/apps/sim/stores/terminal/console/store.ts index 55b59b135e..9fddbf3efd 100644 --- a/apps/sim/stores/terminal/console/store.ts +++ b/apps/sim/stores/terminal/console/store.ts @@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create()( const newEntry = get().entries[0] - if (newEntry?.error) { + if (newEntry?.error && newEntry.blockType !== 'cancelled') { notifyBlockError({ error: newEntry.error, blockName: newEntry.blockName || 'Unknown Block', @@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create()( useExecutionStore.getState().clearRunPath(workflowId) }, + 
clearExecutionEntries: (executionId: string) => + set((state) => ({ + entries: state.entries.filter((e) => e.executionId !== executionId), + })), + exportConsoleCSV: (workflowId: string) => { const entries = get().entries.filter((entry) => entry.workflowId === workflowId) @@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create()( }, merge: (persistedState, currentState) => { const persisted = persistedState as Partial | undefined - const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => { + const rawEntries = persisted?.entries ?? currentState.entries + const oneHourAgo = Date.now() - 60 * 60 * 1000 + + const entries = rawEntries.map((entry, index) => { + let updated = entry if (entry.executionOrder === undefined) { - return { ...entry, executionOrder: index + 1 } + updated = { ...updated, executionOrder: index + 1 } } - return entry + if ( + entry.isRunning && + entry.startedAt && + new Date(entry.startedAt).getTime() < oneHourAgo + ) { + updated = { ...updated, isRunning: false } + } + return updated }) + return { ...currentState, entries, diff --git a/apps/sim/stores/terminal/console/types.ts b/apps/sim/stores/terminal/console/types.ts index f15f363771..e057854d8c 100644 --- a/apps/sim/stores/terminal/console/types.ts +++ b/apps/sim/stores/terminal/console/types.ts @@ -51,6 +51,7 @@ export interface ConsoleStore { isOpen: boolean addConsole: (entry: Omit) => ConsoleEntry clearWorkflowConsole: (workflowId: string) => void + clearExecutionEntries: (executionId: string) => void exportConsoleCSV: (workflowId: string) => void getWorkflowEntries: (workflowId: string) => ConsoleEntry[] toggleConsole: () => void From fde8f97aff3fe97ae6fa30f13bbbc7caf1e7e68e Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 13:52:48 -0800 Subject: [PATCH 02/14] fix(terminal): use ExecutionEvent type instead of any in reconnection stream --- .../[id]/executions/[executionId]/stream/route.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index 254831c105..c0bb471cca 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -7,7 +7,7 @@ import { getExecutionMeta, readExecutionEvents, } from '@/lib/execution/event-buffer' -import { formatSSEEvent } from '@/lib/workflows/executor/execution-events' +import { type ExecutionEvent, formatSSEEvent } from '@/lib/workflows/executor/execution-events' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' const logger = createLogger('ExecutionStreamReconnectAPI') @@ -88,7 +88,7 @@ export async function GET( const events = await readExecutionEvents(executionId, lastEventId) for (const entry of events) { if (closed) return - enqueue(formatSSEEvent(entry.event as any)) + enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) lastEventId = entry.eventId } @@ -108,7 +108,7 @@ export async function GET( const newEvents = await readExecutionEvents(executionId, lastEventId) for (const entry of newEvents) { if (closed) return - enqueue(formatSSEEvent(entry.event as any)) + enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) lastEventId = entry.eventId } @@ -118,7 +118,7 @@ export async function GET( const finalEvents = await readExecutionEvents(executionId, lastEventId) for (const entry of finalEvents) { if 
(closed) return - enqueue(formatSSEEvent(entry.event as any)) + enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) lastEventId = entry.eventId } enqueue('data: [DONE]\n\n') From a5e8a30892055490c9941dfd5d0801d0f6bd77e8 Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 13:57:02 -0800 Subject: [PATCH 03/14] fix(execution): type event buffer with ExecutionEvent instead of Record Co-Authored-By: Claude Opus 4.6 --- apps/sim/app/api/workflows/[id]/execute/route.ts | 2 +- .../[id]/executions/[executionId]/stream/route.ts | 8 ++++---- apps/sim/lib/execution/event-buffer.ts | 7 ++++--- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index ec857deb38..a3c77e7f29 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -720,7 +720,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: } } if (event.type !== 'stream:chunk' && event.type !== 'stream:done') { - eventWriter.write(event as unknown as Record).catch(() => {}) + eventWriter.write(event).catch(() => {}) } } diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index c0bb471cca..533879909a 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -7,7 +7,7 @@ import { getExecutionMeta, readExecutionEvents, } from '@/lib/execution/event-buffer' -import { type ExecutionEvent, formatSSEEvent } from '@/lib/workflows/executor/execution-events' +import { formatSSEEvent } from '@/lib/workflows/executor/execution-events' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' const logger = createLogger('ExecutionStreamReconnectAPI') @@ -88,7 +88,7 @@ export async function GET( const events = await readExecutionEvents(executionId, lastEventId) for (const entry of events) { if (closed) return - enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) + enqueue(formatSSEEvent(entry.event)) lastEventId = entry.eventId } @@ -108,7 +108,7 @@ export async function GET( const newEvents = await readExecutionEvents(executionId, lastEventId) for (const entry of newEvents) { if (closed) return - enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) + enqueue(formatSSEEvent(entry.event)) lastEventId = entry.eventId } @@ -118,7 +118,7 @@ export async function GET( const finalEvents = await readExecutionEvents(executionId, lastEventId) for (const entry of finalEvents) { if (closed) return - enqueue(formatSSEEvent(entry.event as unknown as ExecutionEvent)) + enqueue(formatSSEEvent(entry.event)) lastEventId = entry.eventId } enqueue('data: [DONE]\n\n') diff --git a/apps/sim/lib/execution/event-buffer.ts b/apps/sim/lib/execution/event-buffer.ts index 14f2685d26..e79c29c05e 100644 --- a/apps/sim/lib/execution/event-buffer.ts +++ b/apps/sim/lib/execution/event-buffer.ts @@ -1,5 +1,6 @@ import { createLogger } from '@sim/logger' import { getRedisClient } from '@/lib/core/config/redis' +import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events' const logger = createLogger('ExecutionEventBuffer') @@ -34,11 +35,11 @@ export interface ExecutionStreamMeta { export interface ExecutionEventEntry { eventId: number executionId: string - event: Record + event: 
ExecutionEvent } export interface ExecutionEventWriter { - write: (event: Record) => Promise + write: (event: ExecutionEvent) => Promise flush: () => Promise close: () => Promise } @@ -199,7 +200,7 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW } } - const write = async (event: Record) => { + const write = async (event: ExecutionEvent) => { if (closed) return { eventId: 0, executionId, event } if (nextEventId === 0 || nextEventId > maxReservedId) { await reserveIds(1) From ef477bacb1234bb7fd8c28a2db34cf25bcab3dc8 Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 14:29:47 -0800 Subject: [PATCH 04/14] fix(execution): validate fromEventId query param in reconnection endpoint Co-Authored-By: Claude Opus 4.6 --- .../workflows/[id]/executions/[executionId]/stream/route.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index 533879909a..bb55c62b4b 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -58,7 +58,8 @@ export async function GET( } const fromParam = req.nextUrl.searchParams.get('from') - const fromEventId = fromParam ? Number.parseInt(fromParam, 10) : 0 + const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0 + const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0 logger.info('Reconnection stream requested', { workflowId, From 986cef37db226aa1d29197c2df3dc87fb771d113 Mon Sep 17 00:00:00 2001 From: Siddharth Ganesan Date: Wed, 11 Feb 2026 14:15:00 -0800 Subject: [PATCH 05/14] Fix some bugs --- .tmp_190f_chat_route.ts | 585 +++ .tmp_190f_mention_constants.ts | 281 ++ .tmp_190f_types.ts | 273 ++ .tmp_old_chat_lifecycle.ts | 0 .tmp_old_client_manager.ts | 24 + .tmp_old_client_registry.ts | 34 + .tmp_old_client_sse_handlers.ts | 0 .tmp_old_constants.ts | 280 ++ .tmp_old_copilot_api.ts | 186 + .tmp_old_execute_copilot_server_tool_route.ts | 54 + .tmp_old_execute_server_tool_route.ts | 54 + .tmp_old_execute_tool_route.ts | 247 + .tmp_old_get_examples_rag.ts | 52 + .tmp_old_get_operations_examples.ts | 58 + .tmp_old_get_trigger_examples.ts | 31 + .tmp_old_init_tool_configs.ts | 36 + .tmp_old_manage_custom_tool_client.ts | 408 ++ .tmp_old_manage_mcp_tool_client.ts | 360 ++ .tmp_old_oauth_request_access_client.ts | 174 + .tmp_old_process_contents.ts | 555 +++ .tmp_old_shared_schemas.ts | 178 + .tmp_old_store.ts | 4239 +++++++++++++++++ .tmp_old_tools_registry.ts | 3480 ++++++++++++++ .tmp_old_types.ts | 261 + apps/sim/app/api/copilot/chat/route.ts | 51 +- apps/sim/lib/copilot/chat-payload.ts | 6 + .../copilot/orchestrator/sse-handlers.test.ts | 45 + .../orchestrator/sse-handlers/handlers.ts | 15 + .../orchestrator/tool-executor/index.ts | 257 +- apps/sim/stores/panel/copilot/store.ts | 81 +- 30 files changed, 12283 insertions(+), 22 deletions(-) create mode 100644 .tmp_190f_chat_route.ts create mode 100644 .tmp_190f_mention_constants.ts create mode 100644 .tmp_190f_types.ts create mode 100644 .tmp_old_chat_lifecycle.ts create mode 100644 .tmp_old_client_manager.ts create mode 100644 .tmp_old_client_registry.ts create mode 100644 .tmp_old_client_sse_handlers.ts create mode 100644 .tmp_old_constants.ts create mode 100644 .tmp_old_copilot_api.ts create mode 100644 .tmp_old_execute_copilot_server_tool_route.ts create mode 100644 
.tmp_old_execute_server_tool_route.ts create mode 100644 .tmp_old_execute_tool_route.ts create mode 100644 .tmp_old_get_examples_rag.ts create mode 100644 .tmp_old_get_operations_examples.ts create mode 100644 .tmp_old_get_trigger_examples.ts create mode 100644 .tmp_old_init_tool_configs.ts create mode 100644 .tmp_old_manage_custom_tool_client.ts create mode 100644 .tmp_old_manage_mcp_tool_client.ts create mode 100644 .tmp_old_oauth_request_access_client.ts create mode 100644 .tmp_old_process_contents.ts create mode 100644 .tmp_old_shared_schemas.ts create mode 100644 .tmp_old_store.ts create mode 100644 .tmp_old_tools_registry.ts create mode 100644 .tmp_old_types.ts diff --git a/.tmp_190f_chat_route.ts b/.tmp_190f_chat_route.ts new file mode 100644 index 0000000000..248298348c --- /dev/null +++ b/.tmp_190f_chat_route.ts @@ -0,0 +1,585 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { buildConversationHistory } from '@/lib/copilot/chat-context' +import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle' +import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload' +import { generateChatTitle } from '@/lib/copilot/chat-title' +import { getCopilotModel } from '@/lib/copilot/config' +import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models' +import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' +import { + createStreamEventWriter, + resetStreamBuffer, + setStreamMeta, +} from '@/lib/copilot/orchestrator/stream-buffer' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createRequestTracker, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { env } from '@/lib/core/config/env' +import { resolveWorkflowIdForUser } from '@/lib/workflows/utils' + +const logger = createLogger('CopilotChatAPI') + +const FileAttachmentSchema = z.object({ + id: z.string(), + key: z.string(), + filename: z.string(), + media_type: z.string(), + size: z.number(), +}) + +const ChatMessageSchema = z.object({ + message: z.string().min(1, 'Message is required'), + userMessageId: z.string().optional(), // ID from frontend for the user message + chatId: z.string().optional(), + workflowId: z.string().optional(), + workflowName: z.string().optional(), + model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.6-opus'), + mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'), + prefetch: z.boolean().optional(), + createNewChat: z.boolean().optional().default(false), + stream: z.boolean().optional().default(true), + implicitFeedback: z.string().optional(), + fileAttachments: z.array(FileAttachmentSchema).optional(), + provider: z.string().optional().default('openai'), + conversationId: z.string().optional(), + contexts: z + .array( + z.object({ + kind: z.enum([ + 'past_chat', + 'workflow', + 'current_workflow', + 'blocks', + 'logs', + 'workflow_block', + 'knowledge', + 'templates', + 'docs', + ]), + label: z.string(), + chatId: z.string().optional(), + workflowId: z.string().optional(), + knowledgeId: z.string().optional(), + blockId: z.string().optional(), + templateId: z.string().optional(), + executionId: z.string().optional(), + // For workflow_block, provide both workflowId and blockId + }) + ) + .optional(), 
+ commands: z.array(z.string()).optional(), +}) + +/** + * POST /api/copilot/chat + * Send messages to sim agent and handle chat persistence + */ +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + + try { + // Get session to access user information including name + const session = await getSession() + + if (!session?.user?.id) { + return createUnauthorizedResponse() + } + + const authenticatedUserId = session.user.id + + const body = await req.json() + const { + message, + userMessageId, + chatId, + workflowId: providedWorkflowId, + workflowName, + model, + mode, + prefetch, + createNewChat, + stream, + implicitFeedback, + fileAttachments, + provider, + conversationId, + contexts, + commands, + } = ChatMessageSchema.parse(body) + + // Resolve workflowId - if not provided, use first workflow or find by name + const resolved = await resolveWorkflowIdForUser( + authenticatedUserId, + providedWorkflowId, + workflowName + ) + if (!resolved) { + return createBadRequestResponse( + 'No workflows found. Create a workflow first or provide a valid workflowId.' + ) + } + const workflowId = resolved.workflowId + + // Ensure we have a consistent user message ID for this request + const userMessageIdToUse = userMessageId || crypto.randomUUID() + try { + logger.info(`[${tracker.requestId}] Received chat POST`, { + hasContexts: Array.isArray(contexts), + contextsCount: Array.isArray(contexts) ? contexts.length : 0, + contextsPreview: Array.isArray(contexts) + ? contexts.map((c: any) => ({ + kind: c?.kind, + chatId: c?.chatId, + workflowId: c?.workflowId, + executionId: (c as any)?.executionId, + label: c?.label, + })) + : undefined, + }) + } catch {} + // Preprocess contexts server-side + let agentContexts: Array<{ type: string; content: string }> = [] + if (Array.isArray(contexts) && contexts.length > 0) { + try { + const { processContextsServer } = await import('@/lib/copilot/process-contents') + const processed = await processContextsServer(contexts as any, authenticatedUserId, message) + agentContexts = processed + logger.info(`[${tracker.requestId}] Contexts processed for request`, { + processedCount: agentContexts.length, + kinds: agentContexts.map((c) => c.type), + lengthPreview: agentContexts.map((c) => c.content?.length ?? 0), + }) + if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) { + logger.warn( + `[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.` + ) + } + } catch (e) { + logger.error(`[${tracker.requestId}] Failed to process contexts`, e) + } + } + + // Handle chat context + let currentChat: any = null + let conversationHistory: any[] = [] + let actualChatId = chatId + + if (chatId || createNewChat) { + const defaultsForChatRow = getCopilotModel('chat') + const chatResult = await resolveOrCreateChat({ + chatId, + userId: authenticatedUserId, + workflowId, + model: defaultsForChatRow.model, + }) + currentChat = chatResult.chat + actualChatId = chatResult.chatId || chatId + const history = buildConversationHistory( + chatResult.conversationHistory, + (chatResult.chat?.conversationId as string | undefined) || conversationId + ) + conversationHistory = history.history + } + + const defaults = getCopilotModel('chat') + const selectedModel = model || defaults.model + const effectiveMode = mode === 'agent' ? 
'build' : mode + const effectiveConversationId = + (currentChat?.conversationId as string | undefined) || conversationId + + const requestPayload = await buildCopilotRequestPayload( + { + message, + workflowId, + userId: authenticatedUserId, + userMessageId: userMessageIdToUse, + mode, + model: selectedModel, + conversationHistory, + contexts: agentContexts, + fileAttachments, + commands, + chatId: actualChatId, + implicitFeedback, + }, + { + selectedModel, + } + ) + + try { + logger.info(`[${tracker.requestId}] About to call Sim Agent`, { + hasContext: agentContexts.length > 0, + contextCount: agentContexts.length, + hasConversationId: !!effectiveConversationId, + hasFileAttachments: Array.isArray(requestPayload.fileAttachments), + messageLength: message.length, + mode: effectiveMode, + hasTools: Array.isArray(requestPayload.tools), + toolCount: Array.isArray(requestPayload.tools) ? requestPayload.tools.length : 0, + hasBaseTools: Array.isArray(requestPayload.baseTools), + baseToolCount: Array.isArray(requestPayload.baseTools) + ? requestPayload.baseTools.length + : 0, + hasCredentials: !!requestPayload.credentials, + }) + } catch {} + + if (stream) { + const streamId = userMessageIdToUse + let eventWriter: ReturnType<typeof createStreamEventWriter> | null = null + let clientDisconnected = false + const transformedStream = new ReadableStream({ + async start(controller) { + const encoder = new TextEncoder() + + await resetStreamBuffer(streamId) + await setStreamMeta(streamId, { status: 'active', userId: authenticatedUserId }) + eventWriter = createStreamEventWriter(streamId) + + const shouldFlushEvent = (event: Record<string, unknown>) => + event.type === 'tool_call' || + event.type === 'tool_result' || + event.type === 'tool_error' || + event.type === 'subagent_end' || + event.type === 'structured_result' || + event.type === 'subagent_result' || + event.type === 'done' || + event.type === 'error' + + const pushEvent = async (event: Record<string, unknown>) => { + if (!eventWriter) return + const entry = await eventWriter.write(event) + if (shouldFlushEvent(event)) { + await eventWriter.flush() + } + const payload = { + ...event, + eventId: entry.eventId, + streamId, + } + try { + if (!clientDisconnected) { + controller.enqueue(encoder.encode(`data: ${JSON.stringify(payload)}\n\n`)) + } + } catch { + clientDisconnected = true + await eventWriter.flush() + } + } + + if (actualChatId) { + await pushEvent({ type: 'chat_id', chatId: actualChatId }) + } + + if (actualChatId && !currentChat?.title && conversationHistory.length === 0) { + generateChatTitle(message) + .then(async (title) => { + if (title) { + await db + .update(copilotChats) + .set({ + title, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId!)) + await pushEvent({ type: 'title_updated', title }) + } + }) + .catch((error) => { + logger.error(`[${tracker.requestId}] Title generation failed:`, error) + }) + } + + try { + const result = await orchestrateCopilotStream(requestPayload, { + userId: authenticatedUserId, + workflowId, + chatId: actualChatId, + autoExecuteTools: true, + interactive: true, + onEvent: async (event) => { + await pushEvent(event) + }, + }) + + if (currentChat && result.conversationId) { + await db + .update(copilotChats) + .set({ + updatedAt: new Date(), + conversationId: result.conversationId, + }) + .where(eq(copilotChats.id, actualChatId!)) + } + await eventWriter.close() + await setStreamMeta(streamId, { status: 'complete', userId: authenticatedUserId }) + } catch (error) { + logger.error(`[${tracker.requestId}] Orchestration error:`, error) + await
eventWriter.close() + await setStreamMeta(streamId, { + status: 'error', + userId: authenticatedUserId, + error: error instanceof Error ? error.message : 'Stream error', + }) + await pushEvent({ + type: 'error', + data: { + displayMessage: 'An unexpected error occurred while processing the response.', + }, + }) + } finally { + controller.close() + } + }, + async cancel() { + clientDisconnected = true + if (eventWriter) { + await eventWriter.flush() + } + }, + }) + + return new Response(transformedStream, { + headers: { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + Connection: 'keep-alive', + 'X-Accel-Buffering': 'no', + }, + }) + } + + const nonStreamingResult = await orchestrateCopilotStream(requestPayload, { + userId: authenticatedUserId, + workflowId, + chatId: actualChatId, + autoExecuteTools: true, + interactive: true, + }) + + const responseData = { + content: nonStreamingResult.content, + toolCalls: nonStreamingResult.toolCalls, + model: selectedModel, + provider: + (requestPayload?.provider as Record)?.provider || + env.COPILOT_PROVIDER || + 'openai', + } + + logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, { + hasContent: !!responseData.content, + contentLength: responseData.content?.length || 0, + model: responseData.model, + provider: responseData.provider, + toolCallsCount: responseData.toolCalls?.length || 0, + }) + + // Save messages if we have a chat + if (currentChat && responseData.content) { + const userMessage = { + id: userMessageIdToUse, // Consistent ID used for request and persistence + role: 'user', + content: message, + timestamp: new Date().toISOString(), + ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }), + ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }), + ...(Array.isArray(contexts) && + contexts.length > 0 && { + contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }], + }), + } + + const assistantMessage = { + id: crypto.randomUUID(), + role: 'assistant', + content: responseData.content, + timestamp: new Date().toISOString(), + } + + const updatedMessages = [...conversationHistory, userMessage, assistantMessage] + + // Start title generation in parallel if this is first message (non-streaming) + if (actualChatId && !currentChat.title && conversationHistory.length === 0) { + logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`) + generateChatTitle(message) + .then(async (title) => { + if (title) { + await db + .update(copilotChats) + .set({ + title, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId!)) + logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`) + } + }) + .catch((error) => { + logger.error(`[${tracker.requestId}] Title generation failed:`, error) + }) + } + + // Update chat in database immediately (without blocking for title) + await db + .update(copilotChats) + .set({ + messages: updatedMessages, + updatedAt: new Date(), + ...(nonStreamingResult.conversationId + ? 
{ conversationId: nonStreamingResult.conversationId } + : {}), + }) + .where(eq(copilotChats.id, actualChatId!)) + } + + logger.info(`[${tracker.requestId}] Returning non-streaming response`, { + duration: tracker.getDuration(), + chatId: actualChatId, + responseLength: responseData.content?.length || 0, + }) + + return NextResponse.json({ + success: true, + response: responseData, + chatId: actualChatId, + metadata: { + requestId: tracker.requestId, + message, + duration: tracker.getDuration(), + }, + }) + } catch (error) { + const duration = tracker.getDuration() + + if (error instanceof z.ZodError) { + logger.error(`[${tracker.requestId}] Validation error:`, { + duration, + errors: error.errors, + }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${tracker.requestId}] Error handling copilot chat:`, { + duration, + error: error instanceof Error ? error.message : 'Unknown error', + stack: error instanceof Error ? error.stack : undefined, + }) + + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} + +export async function GET(req: NextRequest) { + try { + const { searchParams } = new URL(req.url) + const workflowId = searchParams.get('workflowId') + const chatId = searchParams.get('chatId') + + // Get authenticated user using consolidated helper + const { userId: authenticatedUserId, isAuthenticated } = + await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !authenticatedUserId) { + return createUnauthorizedResponse() + } + + // If chatId is provided, fetch a single chat + if (chatId) { + const [chat] = await db + .select({ + id: copilotChats.id, + title: copilotChats.title, + model: copilotChats.model, + messages: copilotChats.messages, + planArtifact: copilotChats.planArtifact, + config: copilotChats.config, + createdAt: copilotChats.createdAt, + updatedAt: copilotChats.updatedAt, + }) + .from(copilotChats) + .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, authenticatedUserId))) + .limit(1) + + if (!chat) { + return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 }) + } + + const transformedChat = { + id: chat.id, + title: chat.title, + model: chat.model, + messages: Array.isArray(chat.messages) ? chat.messages : [], + messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0, + planArtifact: chat.planArtifact || null, + config: chat.config || null, + createdAt: chat.createdAt, + updatedAt: chat.updatedAt, + } + + logger.info(`Retrieved chat ${chatId}`) + return NextResponse.json({ success: true, chat: transformedChat }) + } + + if (!workflowId) { + return createBadRequestResponse('workflowId or chatId is required') + } + + // Fetch chats for this user and workflow + const chats = await db + .select({ + id: copilotChats.id, + title: copilotChats.title, + model: copilotChats.model, + messages: copilotChats.messages, + planArtifact: copilotChats.planArtifact, + config: copilotChats.config, + createdAt: copilotChats.createdAt, + updatedAt: copilotChats.updatedAt, + }) + .from(copilotChats) + .where( + and(eq(copilotChats.userId, authenticatedUserId), eq(copilotChats.workflowId, workflowId)) + ) + .orderBy(desc(copilotChats.updatedAt)) + + // Transform the data to include message count + const transformedChats = chats.map((chat) => ({ + id: chat.id, + title: chat.title, + model: chat.model, + messages: Array.isArray(chat.messages) ? 
chat.messages : [], + messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0, + planArtifact: chat.planArtifact || null, + config: chat.config || null, + createdAt: chat.createdAt, + updatedAt: chat.updatedAt, + })) + + logger.info(`Retrieved ${transformedChats.length} chats for workflow ${workflowId}`) + + return NextResponse.json({ + success: true, + chats: transformedChats, + }) + } catch (error) { + logger.error('Error fetching copilot chats:', error) + return createInternalServerErrorResponse('Failed to fetch chats') + } +} diff --git a/.tmp_190f_mention_constants.ts b/.tmp_190f_mention_constants.ts new file mode 100644 index 0000000000..faff318f9f --- /dev/null +++ b/.tmp_190f_mention_constants.ts @@ -0,0 +1,281 @@ +import type { ChatContext } from '@/stores/panel' + +/** + * Mention folder types + */ +export type MentionFolderId = + | 'chats' + | 'workflows' + | 'knowledge' + | 'blocks' + | 'workflow-blocks' + | 'templates' + | 'logs' + +/** + * Menu item category types for mention menu (includes folders + docs item) + */ +export type MentionCategory = MentionFolderId | 'docs' + +/** + * Configuration interface for folder types + */ +export interface FolderConfig { + /** Display title in menu */ + title: string + /** Data source key in useMentionData return */ + dataKey: string + /** Loading state key in useMentionData return */ + loadingKey: string + /** Ensure loaded function key in useMentionData return (optional - some folders auto-load) */ + ensureLoadedKey?: string + /** Extract label from an item */ + getLabel: (item: TItem) => string + /** Extract unique ID from an item */ + getId: (item: TItem) => string + /** Empty state message */ + emptyMessage: string + /** No match message (when filtering) */ + noMatchMessage: string + /** Filter function for matching query */ + filterFn: (item: TItem, query: string) => boolean + /** Build the ChatContext object from an item */ + buildContext: (item: TItem, workflowId?: string | null) => ChatContext + /** Whether to use insertAtCursor fallback when replaceActiveMentionWith fails */ + useInsertFallback?: boolean +} + +/** + * Configuration for all folder types in the mention menu + */ +export const FOLDER_CONFIGS: Record = { + chats: { + title: 'Chats', + dataKey: 'pastChats', + loadingKey: 'isLoadingPastChats', + ensureLoadedKey: 'ensurePastChatsLoaded', + getLabel: (item) => item.title || 'New Chat', + getId: (item) => item.id, + emptyMessage: 'No past chats', + noMatchMessage: 'No matching chats', + filterFn: (item, q) => (item.title || 'New Chat').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'past_chat', + chatId: item.id, + label: item.title || 'New Chat', + }), + useInsertFallback: false, + }, + workflows: { + title: 'All workflows', + dataKey: 'workflows', + loadingKey: 'isLoadingWorkflows', + // No ensureLoadedKey - workflows auto-load from registry store + getLabel: (item) => item.name || 'Untitled Workflow', + getId: (item) => item.id, + emptyMessage: 'No workflows', + noMatchMessage: 'No matching workflows', + filterFn: (item, q) => (item.name || 'Untitled Workflow').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'workflow', + workflowId: item.id, + label: item.name || 'Untitled Workflow', + }), + useInsertFallback: true, + }, + knowledge: { + title: 'Knowledge Bases', + dataKey: 'knowledgeBases', + loadingKey: 'isLoadingKnowledge', + ensureLoadedKey: 'ensureKnowledgeLoaded', + getLabel: (item) => item.name || 'Untitled', + getId: (item) => item.id, + emptyMessage: 'No 
knowledge bases', + noMatchMessage: 'No matching knowledge bases', + filterFn: (item, q) => (item.name || 'Untitled').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'knowledge', + knowledgeId: item.id, + label: item.name || 'Untitled', + }), + useInsertFallback: false, + }, + blocks: { + title: 'Blocks', + dataKey: 'blocksList', + loadingKey: 'isLoadingBlocks', + ensureLoadedKey: 'ensureBlocksLoaded', + getLabel: (item) => item.name || item.id, + getId: (item) => item.id, + emptyMessage: 'No blocks found', + noMatchMessage: 'No matching blocks', + filterFn: (item, q) => (item.name || item.id).toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'blocks', + blockIds: [item.id], + label: item.name || item.id, + }), + useInsertFallback: false, + }, + 'workflow-blocks': { + title: 'Workflow Blocks', + dataKey: 'workflowBlocks', + loadingKey: 'isLoadingWorkflowBlocks', + // No ensureLoadedKey - workflow blocks auto-sync from store + getLabel: (item) => item.name || item.id, + getId: (item) => item.id, + emptyMessage: 'No blocks in this workflow', + noMatchMessage: 'No matching blocks', + filterFn: (item, q) => (item.name || item.id).toLowerCase().includes(q), + buildContext: (item, workflowId) => ({ + kind: 'workflow_block', + workflowId: workflowId || '', + blockId: item.id, + label: item.name || item.id, + }), + useInsertFallback: true, + }, + templates: { + title: 'Templates', + dataKey: 'templatesList', + loadingKey: 'isLoadingTemplates', + ensureLoadedKey: 'ensureTemplatesLoaded', + getLabel: (item) => item.name || 'Untitled Template', + getId: (item) => item.id, + emptyMessage: 'No templates found', + noMatchMessage: 'No matching templates', + filterFn: (item, q) => (item.name || 'Untitled Template').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'templates', + templateId: item.id, + label: item.name || 'Untitled Template', + }), + useInsertFallback: false, + }, + logs: { + title: 'Logs', + dataKey: 'logsList', + loadingKey: 'isLoadingLogs', + ensureLoadedKey: 'ensureLogsLoaded', + getLabel: (item) => item.workflowName, + getId: (item) => item.id, + emptyMessage: 'No executions found', + noMatchMessage: 'No matching executions', + filterFn: (item, q) => + [item.workflowName, item.trigger || ''].join(' ').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'logs', + executionId: item.executionId || item.id, + label: item.workflowName, + }), + useInsertFallback: false, + }, +} + +/** + * Order of folders in the mention menu + */ +export const FOLDER_ORDER: MentionFolderId[] = [ + 'chats', + 'workflows', + 'knowledge', + 'blocks', + 'workflow-blocks', + 'templates', + 'logs', +] + +/** + * Docs item configuration (special case - not a folder) + */ +export const DOCS_CONFIG = { + getLabel: () => 'Docs', + buildContext: (): ChatContext => ({ kind: 'docs', label: 'Docs' }), +} as const + +/** + * Total number of items in root menu (folders + docs) + */ +export const ROOT_MENU_ITEM_COUNT = FOLDER_ORDER.length + 1 + +/** + * Slash command configuration + */ +export interface SlashCommand { + id: string + label: string +} + +export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [ + { id: 'fast', label: 'Fast' }, + { id: 'research', label: 'Research' }, + { id: 'actions', label: 'Actions' }, +] as const + +/** + * Maps UI command IDs to API command IDs. 
+ * Some commands have different IDs for display vs API (e.g., "actions" -> "superagent") + */ +export function getApiCommandId(uiCommandId: string): string { + const commandMapping: Record = { + actions: 'superagent', + } + return commandMapping[uiCommandId] || uiCommandId +} + +export const WEB_COMMANDS: readonly SlashCommand[] = [ + { id: 'search', label: 'Search' }, + { id: 'read', label: 'Read' }, + { id: 'scrape', label: 'Scrape' }, + { id: 'crawl', label: 'Crawl' }, +] as const + +export const ALL_SLASH_COMMANDS: readonly SlashCommand[] = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS] + +export const ALL_COMMAND_IDS = ALL_SLASH_COMMANDS.map((cmd) => cmd.id) + +/** + * Get display label for a command ID + */ +export function getCommandDisplayLabel(commandId: string): string { + const command = ALL_SLASH_COMMANDS.find((cmd) => cmd.id === commandId) + return command?.label || commandId.charAt(0).toUpperCase() + commandId.slice(1) +} + +/** + * Model configuration options + */ +export const MODEL_OPTIONS = [ + { value: 'claude-4.6-opus', label: 'Claude 4.6 Opus' }, + { value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' }, + { value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' }, + { value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' }, + { value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' }, + { value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' }, + { value: 'gemini-3-pro', label: 'Gemini 3 Pro' }, +] as const + +/** + * Threshold for considering input "near top" of viewport (in pixels) + */ +export const NEAR_TOP_THRESHOLD = 300 + +/** + * Scroll tolerance for mention menu positioning (in pixels) + */ +export const SCROLL_TOLERANCE = 8 + +/** + * Shared CSS classes for menu state text (loading, empty states) + */ +export const MENU_STATE_TEXT_CLASSES = 'px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]' + +/** + * Calculates the next index for circular navigation (wraps around at bounds) + */ +export function getNextIndex(current: number, direction: 'up' | 'down', maxIndex: number): number { + if (direction === 'down') { + return current >= maxIndex ? 0 : current + 1 + } + return current <= 0 ? 
maxIndex : current - 1 +} diff --git a/.tmp_190f_types.ts b/.tmp_190f_types.ts new file mode 100644 index 0000000000..06b7532321 --- /dev/null +++ b/.tmp_190f_types.ts @@ -0,0 +1,273 @@ +import type { CopilotMode, CopilotModelId } from '@/lib/copilot/models' + +export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models' + +import type { ClientContentBlock } from '@/lib/copilot/client-sse/types' +import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool' +import type { WorkflowState } from '@/stores/workflows/workflow/types' + +export type ToolState = ClientToolCallState + +/** + * Subagent content block for nested thinking/reasoning inside a tool call + */ +export interface SubAgentContentBlock { + type: 'subagent_text' | 'subagent_tool_call' + content?: string + toolCall?: CopilotToolCall + timestamp: number +} + +export interface CopilotToolCall { + id: string + name: string + state: ClientToolCallState + params?: Record<string, any> + input?: Record<string, any> + display?: ClientToolDisplay + /** Content streamed from a subagent (e.g., debug agent) */ + subAgentContent?: string + /** Tool calls made by the subagent */ + subAgentToolCalls?: CopilotToolCall[] + /** Structured content blocks for subagent (thinking + tool calls in order) */ + subAgentBlocks?: SubAgentContentBlock[] + /** Whether subagent is currently streaming */ + subAgentStreaming?: boolean +} + +export interface CopilotStreamInfo { + streamId: string + workflowId: string + chatId?: string + userMessageId: string + assistantMessageId: string + lastEventId: number + resumeAttempts: number + userMessageContent: string + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + startedAt: number +} + +export interface MessageFileAttachment { + id: string + key: string + filename: string + media_type: string + size: number +} + +export interface CopilotMessage { + id: string + role: 'user' | 'assistant' | 'system' + content: string + timestamp: string + citations?: { id: number; title: string; url: string; similarity?: number }[] + toolCalls?: CopilotToolCall[] + contentBlocks?: ClientContentBlock[] + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required' +} + +/** + * A message queued for sending while another message is in progress. + * Like Cursor's queued message feature.
+ */ +export interface QueuedMessage { + id: string + content: string + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + queuedAt: number + /** Original messageId to use when processing (for edit/resend flows) */ + originalMessageId?: string +} + +// Contexts attached to a user message +export type ChatContext = + | { kind: 'past_chat'; chatId: string; label: string } + | { kind: 'workflow'; workflowId: string; label: string } + | { kind: 'current_workflow'; workflowId: string; label: string } + | { kind: 'blocks'; blockIds: string[]; label: string } + | { kind: 'logs'; executionId?: string; label: string } + | { kind: 'workflow_block'; workflowId: string; blockId: string; label: string } + | { kind: 'knowledge'; knowledgeId?: string; label: string } + | { kind: 'templates'; templateId?: string; label: string } + | { kind: 'docs'; label: string } + | { kind: 'slash_command'; command: string; label: string } + +import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api' + +export type CopilotChat = ApiCopilotChat + +/** + * A checkpoint entry as returned from the checkpoints API. + */ +export interface CheckpointEntry { + id: string + messageId?: string + workflowState?: Record + createdAt?: string +} + +export interface CopilotState { + mode: CopilotMode + selectedModel: CopilotModelId + agentPrefetch: boolean + enabledModels: string[] | null // Null means not loaded yet, array of model IDs when loaded + isCollapsed: boolean + + currentChat: CopilotChat | null + chats: CopilotChat[] + messages: CopilotMessage[] + workflowId: string | null + + messageCheckpoints: Record + messageSnapshots: Record + + isLoading: boolean + isLoadingChats: boolean + isLoadingCheckpoints: boolean + isSendingMessage: boolean + isSaving: boolean + isRevertingCheckpoint: boolean + isAborting: boolean + /** Skip adding Continue option on abort for queued send-now */ + suppressAbortContinueOption?: boolean + + error: string | null + saveError: string | null + checkpointError: string | null + + abortController: AbortController | null + + chatsLastLoadedAt: Date | null + chatsLoadedForWorkflow: string | null + + revertState: { messageId: string; messageContent: string } | null + inputValue: string + + planTodos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }> + showPlanTodos: boolean + + // Streaming plan content from design_workflow tool (for plan mode section) + streamingPlanContent: string + + // Map of toolCallId -> CopilotToolCall for quick access during streaming + toolCallsById: Record + + // Transient flag to prevent auto-selecting a chat during new-chat UX + suppressAutoSelect?: boolean + + // Explicitly track the current user message id for this in-flight query (for stats/diff correlation) + currentUserMessageId?: string | null + + // Per-message metadata captured at send-time for reliable stats + + // Auto-allowed integration tools (tools that can run without confirmation) + autoAllowedTools: string[] + autoAllowedToolsLoaded: boolean + + // Active stream metadata for reconnect/replay + activeStream: CopilotStreamInfo | null + + // Message queue for messages sent while another is in progress + messageQueue: QueuedMessage[] + + // Credential IDs to mask in UI (for sensitive data protection) + sensitiveCredentialIds: Set +} + +export interface CopilotActions { + setMode: (mode: CopilotMode) => void + setSelectedModel: (model: CopilotStore['selectedModel']) => Promise + setAgentPrefetch: (prefetch: boolean) => void + setEnabledModels: (models: 
string[] | null) => void + + setWorkflowId: (workflowId: string | null) => Promise + validateCurrentChat: () => boolean + loadChats: (forceRefresh?: boolean) => Promise + selectChat: (chat: CopilotChat) => Promise + createNewChat: () => Promise + deleteChat: (chatId: string) => Promise + + sendMessage: ( + message: string, + options?: { + stream?: boolean + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + messageId?: string + queueIfBusy?: boolean + } + ) => Promise + abortMessage: (options?: { suppressContinueOption?: boolean }) => void + sendImplicitFeedback: ( + implicitFeedback: string, + toolCallState?: 'accepted' | 'rejected' | 'error' + ) => Promise + updatePreviewToolCallState: ( + toolCallState: 'accepted' | 'rejected' | 'error', + toolCallId?: string + ) => void + resumeActiveStream: () => Promise + setToolCallState: (toolCall: CopilotToolCall, newState: ClientToolCallState | string) => void + updateToolCallParams: (toolCallId: string, params: Record) => void + loadMessageCheckpoints: (chatId: string) => Promise + revertToCheckpoint: (checkpointId: string) => Promise + getCheckpointsForMessage: (messageId: string) => CheckpointEntry[] + saveMessageCheckpoint: (messageId: string) => Promise + + clearMessages: () => void + clearError: () => void + clearSaveError: () => void + clearCheckpointError: () => void + cleanup: () => void + reset: () => void + + setInputValue: (value: string) => void + clearRevertState: () => void + + setPlanTodos: ( + todos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }> + ) => void + updatePlanTodoStatus: (id: string, status: 'executing' | 'completed') => void + closePlanTodos: () => void + clearPlanArtifact: () => Promise + savePlanArtifact: (content: string) => Promise + + handleStreamingResponse: ( + stream: ReadableStream, + messageId: string, + isContinuation?: boolean, + triggerUserMessageId?: string, + abortSignal?: AbortSignal + ) => Promise + handleNewChatCreation: (newChatId: string) => Promise + loadAutoAllowedTools: () => Promise + addAutoAllowedTool: (toolId: string) => Promise + removeAutoAllowedTool: (toolId: string) => Promise + isToolAutoAllowed: (toolId: string) => boolean + + // Credential masking + loadSensitiveCredentialIds: () => Promise + maskCredentialValue: (value: string) => string + + // Message queue actions + addToQueue: ( + message: string, + options?: { + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + /** Original messageId to preserve (for edit/resend flows) */ + messageId?: string + } + ) => void + removeFromQueue: (id: string) => void + moveUpInQueue: (id: string) => void + sendNow: (id: string) => Promise + clearQueue: () => void +} + +export type CopilotStore = CopilotState & CopilotActions diff --git a/.tmp_old_chat_lifecycle.ts b/.tmp_old_chat_lifecycle.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.tmp_old_client_manager.ts b/.tmp_old_client_manager.ts new file mode 100644 index 0000000000..bb83771d1b --- /dev/null +++ b/.tmp_old_client_manager.ts @@ -0,0 +1,24 @@ +const instances: Record = {} + +let syncStateFn: ((toolCallId: string, nextState: any, options?: { result?: any }) => void) | null = + null + +export function registerClientTool(toolCallId: string, instance: any) { + instances[toolCallId] = instance +} + +export function getClientTool(toolCallId: string): any | undefined { + return instances[toolCallId] +} + +export function registerToolStateSync( + fn: (toolCallId: string, nextState: any, options?: { 
result?: any }) => void +) { + syncStateFn = fn +} + +export function syncToolState(toolCallId: string, nextState: any, options?: { result?: any }) { + try { + syncStateFn?.(toolCallId, nextState, options) + } catch {} +} diff --git a/.tmp_old_client_registry.ts b/.tmp_old_client_registry.ts new file mode 100644 index 0000000000..7dfb757aa9 --- /dev/null +++ b/.tmp_old_client_registry.ts @@ -0,0 +1,34 @@ +import { createLogger } from '@sim/logger' +import type { ClientToolDefinition, ToolExecutionContext } from '@/lib/copilot/tools/client/types' + +const logger = createLogger('ClientToolRegistry') + +const tools: Record> = {} + +export function registerTool(def: ClientToolDefinition) { + tools[def.name] = def +} + +export function getTool(name: string): ClientToolDefinition | undefined { + return tools[name] +} + +export function createExecutionContext(params: { + toolCallId: string + toolName: string +}): ToolExecutionContext { + const { toolCallId, toolName } = params + return { + toolCallId, + toolName, + log: (level, message, extra) => { + try { + logger[level](message, { toolCallId, toolName, ...(extra || {}) }) + } catch {} + }, + } +} + +export function getRegisteredTools(): Record> { + return { ...tools } +} diff --git a/.tmp_old_client_sse_handlers.ts b/.tmp_old_client_sse_handlers.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.tmp_old_constants.ts b/.tmp_old_constants.ts new file mode 100644 index 0000000000..b98af5dd21 --- /dev/null +++ b/.tmp_old_constants.ts @@ -0,0 +1,280 @@ +import type { ChatContext } from '@/stores/panel' + +/** + * Mention folder types + */ +export type MentionFolderId = + | 'chats' + | 'workflows' + | 'knowledge' + | 'blocks' + | 'workflow-blocks' + | 'templates' + | 'logs' + +/** + * Menu item category types for mention menu (includes folders + docs item) + */ +export type MentionCategory = MentionFolderId | 'docs' + +/** + * Configuration interface for folder types + */ +export interface FolderConfig { + /** Display title in menu */ + title: string + /** Data source key in useMentionData return */ + dataKey: string + /** Loading state key in useMentionData return */ + loadingKey: string + /** Ensure loaded function key in useMentionData return (optional - some folders auto-load) */ + ensureLoadedKey?: string + /** Extract label from an item */ + getLabel: (item: TItem) => string + /** Extract unique ID from an item */ + getId: (item: TItem) => string + /** Empty state message */ + emptyMessage: string + /** No match message (when filtering) */ + noMatchMessage: string + /** Filter function for matching query */ + filterFn: (item: TItem, query: string) => boolean + /** Build the ChatContext object from an item */ + buildContext: (item: TItem, workflowId?: string | null) => ChatContext + /** Whether to use insertAtCursor fallback when replaceActiveMentionWith fails */ + useInsertFallback?: boolean +} + +/** + * Configuration for all folder types in the mention menu + */ +export const FOLDER_CONFIGS: Record = { + chats: { + title: 'Chats', + dataKey: 'pastChats', + loadingKey: 'isLoadingPastChats', + ensureLoadedKey: 'ensurePastChatsLoaded', + getLabel: (item) => item.title || 'New Chat', + getId: (item) => item.id, + emptyMessage: 'No past chats', + noMatchMessage: 'No matching chats', + filterFn: (item, q) => (item.title || 'New Chat').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'past_chat', + chatId: item.id, + label: item.title || 'New Chat', + }), + useInsertFallback: false, + }, + workflows: { + title: 'All 
workflows', + dataKey: 'workflows', + loadingKey: 'isLoadingWorkflows', + // No ensureLoadedKey - workflows auto-load from registry store + getLabel: (item) => item.name || 'Untitled Workflow', + getId: (item) => item.id, + emptyMessage: 'No workflows', + noMatchMessage: 'No matching workflows', + filterFn: (item, q) => (item.name || 'Untitled Workflow').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'workflow', + workflowId: item.id, + label: item.name || 'Untitled Workflow', + }), + useInsertFallback: true, + }, + knowledge: { + title: 'Knowledge Bases', + dataKey: 'knowledgeBases', + loadingKey: 'isLoadingKnowledge', + ensureLoadedKey: 'ensureKnowledgeLoaded', + getLabel: (item) => item.name || 'Untitled', + getId: (item) => item.id, + emptyMessage: 'No knowledge bases', + noMatchMessage: 'No matching knowledge bases', + filterFn: (item, q) => (item.name || 'Untitled').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'knowledge', + knowledgeId: item.id, + label: item.name || 'Untitled', + }), + useInsertFallback: false, + }, + blocks: { + title: 'Blocks', + dataKey: 'blocksList', + loadingKey: 'isLoadingBlocks', + ensureLoadedKey: 'ensureBlocksLoaded', + getLabel: (item) => item.name || item.id, + getId: (item) => item.id, + emptyMessage: 'No blocks found', + noMatchMessage: 'No matching blocks', + filterFn: (item, q) => (item.name || item.id).toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'blocks', + blockIds: [item.id], + label: item.name || item.id, + }), + useInsertFallback: false, + }, + 'workflow-blocks': { + title: 'Workflow Blocks', + dataKey: 'workflowBlocks', + loadingKey: 'isLoadingWorkflowBlocks', + // No ensureLoadedKey - workflow blocks auto-sync from store + getLabel: (item) => item.name || item.id, + getId: (item) => item.id, + emptyMessage: 'No blocks in this workflow', + noMatchMessage: 'No matching blocks', + filterFn: (item, q) => (item.name || item.id).toLowerCase().includes(q), + buildContext: (item, workflowId) => ({ + kind: 'workflow_block', + workflowId: workflowId || '', + blockId: item.id, + label: item.name || item.id, + }), + useInsertFallback: true, + }, + templates: { + title: 'Templates', + dataKey: 'templatesList', + loadingKey: 'isLoadingTemplates', + ensureLoadedKey: 'ensureTemplatesLoaded', + getLabel: (item) => item.name || 'Untitled Template', + getId: (item) => item.id, + emptyMessage: 'No templates found', + noMatchMessage: 'No matching templates', + filterFn: (item, q) => (item.name || 'Untitled Template').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'templates', + templateId: item.id, + label: item.name || 'Untitled Template', + }), + useInsertFallback: false, + }, + logs: { + title: 'Logs', + dataKey: 'logsList', + loadingKey: 'isLoadingLogs', + ensureLoadedKey: 'ensureLogsLoaded', + getLabel: (item) => item.workflowName, + getId: (item) => item.id, + emptyMessage: 'No executions found', + noMatchMessage: 'No matching executions', + filterFn: (item, q) => + [item.workflowName, item.trigger || ''].join(' ').toLowerCase().includes(q), + buildContext: (item) => ({ + kind: 'logs', + executionId: item.executionId || item.id, + label: item.workflowName, + }), + useInsertFallback: false, + }, +} + +/** + * Order of folders in the mention menu + */ +export const FOLDER_ORDER: MentionFolderId[] = [ + 'chats', + 'workflows', + 'knowledge', + 'blocks', + 'workflow-blocks', + 'templates', + 'logs', +] + +/** + * Docs item configuration (special case - not a folder) + */ +export const 
DOCS_CONFIG = { + getLabel: () => 'Docs', + buildContext: (): ChatContext => ({ kind: 'docs', label: 'Docs' }), +} as const + +/** + * Total number of items in root menu (folders + docs) + */ +export const ROOT_MENU_ITEM_COUNT = FOLDER_ORDER.length + 1 + +/** + * Slash command configuration + */ +export interface SlashCommand { + id: string + label: string +} + +export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [ + { id: 'fast', label: 'Fast' }, + { id: 'research', label: 'Research' }, + { id: 'actions', label: 'Actions' }, +] as const + +/** + * Maps UI command IDs to API command IDs. + * Some commands have different IDs for display vs API (e.g., "actions" -> "superagent") + */ +export function getApiCommandId(uiCommandId: string): string { + const commandMapping: Record = { + actions: 'superagent', + } + return commandMapping[uiCommandId] || uiCommandId +} + +export const WEB_COMMANDS: readonly SlashCommand[] = [ + { id: 'search', label: 'Search' }, + { id: 'read', label: 'Read' }, + { id: 'scrape', label: 'Scrape' }, + { id: 'crawl', label: 'Crawl' }, +] as const + +export const ALL_SLASH_COMMANDS: readonly SlashCommand[] = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS] + +export const ALL_COMMAND_IDS = ALL_SLASH_COMMANDS.map((cmd) => cmd.id) + +/** + * Get display label for a command ID + */ +export function getCommandDisplayLabel(commandId: string): string { + const command = ALL_SLASH_COMMANDS.find((cmd) => cmd.id === commandId) + return command?.label || commandId.charAt(0).toUpperCase() + commandId.slice(1) +} + +/** + * Model configuration options + */ +export const MODEL_OPTIONS = [ + { value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' }, + { value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' }, + { value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' }, + { value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' }, + { value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' }, + { value: 'gemini-3-pro', label: 'Gemini 3 Pro' }, +] as const + +/** + * Threshold for considering input "near top" of viewport (in pixels) + */ +export const NEAR_TOP_THRESHOLD = 300 + +/** + * Scroll tolerance for mention menu positioning (in pixels) + */ +export const SCROLL_TOLERANCE = 8 + +/** + * Shared CSS classes for menu state text (loading, empty states) + */ +export const MENU_STATE_TEXT_CLASSES = 'px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]' + +/** + * Calculates the next index for circular navigation (wraps around at bounds) + */ +export function getNextIndex(current: number, direction: 'up' | 'down', maxIndex: number): number { + if (direction === 'down') { + return current >= maxIndex ? 0 : current + 1 + } + return current <= 0 ? 
maxIndex : current - 1 +} diff --git a/.tmp_old_copilot_api.ts b/.tmp_old_copilot_api.ts new file mode 100644 index 0000000000..c680f9751c --- /dev/null +++ b/.tmp_old_copilot_api.ts @@ -0,0 +1,186 @@ +import { createLogger } from '@sim/logger' +import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models' + +const logger = createLogger('CopilotAPI') + +/** + * Citation interface for documentation references + */ +export interface Citation { + id: number + title: string + url: string + similarity?: number +} + +/** + * Message interface for copilot conversations + */ +export interface CopilotMessage { + id: string + role: 'user' | 'assistant' | 'system' + content: string + timestamp: string + citations?: Citation[] +} + +/** + * Chat config stored in database + */ +export interface CopilotChatConfig { + mode?: CopilotMode + model?: CopilotModelId +} + +/** + * Chat interface for copilot conversations + */ +export interface CopilotChat { + id: string + title: string | null + model: string + messages: CopilotMessage[] + messageCount: number + planArtifact: string | null + config: CopilotChatConfig | null + createdAt: Date + updatedAt: Date +} + +/** + * File attachment interface for message requests + */ +export interface MessageFileAttachment { + id: string + key: string + filename: string + media_type: string + size: number +} + +/** + * Request interface for sending messages + */ +export interface SendMessageRequest { + message: string + userMessageId?: string // ID from frontend for the user message + chatId?: string + workflowId?: string + mode?: CopilotMode | CopilotTransportMode + model?: CopilotModelId + prefetch?: boolean + createNewChat?: boolean + stream?: boolean + implicitFeedback?: string + fileAttachments?: MessageFileAttachment[] + abortSignal?: AbortSignal + contexts?: Array<{ + kind: string + label?: string + chatId?: string + workflowId?: string + executionId?: string + }> + commands?: string[] +} + +/** + * Base API response interface + */ +export interface ApiResponse { + success: boolean + error?: string + status?: number +} + +/** + * Streaming response interface + */ +export interface StreamingResponse extends ApiResponse { + stream?: ReadableStream +} + +/** + * Handle API errors and return user-friendly error messages + */ +async function handleApiError(response: Response, defaultMessage: string): Promise<string> { + try { + const data = await response.json() + return (data && (data.error || data.message)) || defaultMessage + } catch { + return `${defaultMessage} (${response.status})` + } +} + +/** + * Send a streaming message to the copilot chat API + * This is the main API endpoint that handles all chat operations + */ +export async function sendStreamingMessage( + request: SendMessageRequest +): Promise<StreamingResponse> { + try { + const { abortSignal, ...requestBody } = request + try { + const preview = Array.isArray((requestBody as any).contexts) + ? (requestBody as any).contexts.map((c: any) => ({ + kind: c?.kind, + chatId: c?.chatId, + workflowId: c?.workflowId, + label: c?.label, + })) + : undefined + logger.info('Preparing to send streaming message', { + hasContexts: Array.isArray((requestBody as any).contexts), + contextsCount: Array.isArray((requestBody as any).contexts) + ?
(requestBody as any).contexts.length + : 0, + contextsPreview: preview, + }) + } catch {} + const response = await fetch('/api/copilot/chat', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ ...requestBody, stream: true }), + signal: abortSignal, + credentials: 'include', // Include cookies for session authentication + }) + + if (!response.ok) { + const errorMessage = await handleApiError(response, 'Failed to send streaming message') + return { + success: false, + error: errorMessage, + status: response.status, + } + } + + if (!response.body) { + return { + success: false, + error: 'No response body received', + status: 500, + } + } + + return { + success: true, + stream: response.body, + } + } catch (error) { + // Handle AbortError gracefully - this is expected when user aborts + if (error instanceof Error && error.name === 'AbortError') { + logger.info('Streaming message was aborted by user') + return { + success: false, + error: 'Request was aborted', + } + } + + logger.error('Failed to send streaming message:', error) + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + } + } +} diff --git a/.tmp_old_execute_copilot_server_tool_route.ts b/.tmp_old_execute_copilot_server_tool_route.ts new file mode 100644 index 0000000000..5627ae8976 --- /dev/null +++ b/.tmp_old_execute_copilot_server_tool_route.ts @@ -0,0 +1,54 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createRequestTracker, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { routeExecution } from '@/lib/copilot/tools/server/router' + +const logger = createLogger('ExecuteCopilotServerToolAPI') + +const ExecuteSchema = z.object({ + toolName: z.string(), + payload: z.unknown().optional(), +}) + +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await req.json() + try { + const preview = JSON.stringify(body).slice(0, 300) + logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview }) + } catch {} + + const { toolName, payload } = ExecuteSchema.parse(body) + + logger.info(`[${tracker.requestId}] Executing server tool`, { toolName }) + const result = await routeExecution(toolName, payload, { userId }) + + try { + const resultPreview = JSON.stringify(result).slice(0, 300) + logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview }) + } catch {} + + return NextResponse.json({ success: true, result }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues }) + return createBadRequestResponse('Invalid request body for execute-copilot-server-tool') + } + logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error) + const errorMessage = error instanceof Error ? 
error.message : 'Failed to execute server tool' + return createInternalServerErrorResponse(errorMessage) + } +} diff --git a/.tmp_old_execute_server_tool_route.ts b/.tmp_old_execute_server_tool_route.ts new file mode 100644 index 0000000000..5627ae8976 --- /dev/null +++ b/.tmp_old_execute_server_tool_route.ts @@ -0,0 +1,54 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createRequestTracker, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { routeExecution } from '@/lib/copilot/tools/server/router' + +const logger = createLogger('ExecuteCopilotServerToolAPI') + +const ExecuteSchema = z.object({ + toolName: z.string(), + payload: z.unknown().optional(), +}) + +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await req.json() + try { + const preview = JSON.stringify(body).slice(0, 300) + logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview }) + } catch {} + + const { toolName, payload } = ExecuteSchema.parse(body) + + logger.info(`[${tracker.requestId}] Executing server tool`, { toolName }) + const result = await routeExecution(toolName, payload, { userId }) + + try { + const resultPreview = JSON.stringify(result).slice(0, 300) + logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview }) + } catch {} + + return NextResponse.json({ success: true, result }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues }) + return createBadRequestResponse('Invalid request body for execute-copilot-server-tool') + } + logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error) + const errorMessage = error instanceof Error ? 
error.message : 'Failed to execute server tool' + return createInternalServerErrorResponse(errorMessage) + } +} diff --git a/.tmp_old_execute_tool_route.ts b/.tmp_old_execute_tool_route.ts new file mode 100644 index 0000000000..d134d28eb8 --- /dev/null +++ b/.tmp_old_execute_tool_route.ts @@ -0,0 +1,247 @@ +import { db } from '@sim/db' +import { account, workflow } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { + createBadRequestResponse, + createInternalServerErrorResponse, + createRequestTracker, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { generateRequestId } from '@/lib/core/utils/request' +import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' +import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils' +import { resolveEnvVarReferences } from '@/executor/utils/reference-validation' +import { executeTool } from '@/tools' +import { getTool, resolveToolId } from '@/tools/utils' + +const logger = createLogger('CopilotExecuteToolAPI') + +const ExecuteToolSchema = z.object({ + toolCallId: z.string(), + toolName: z.string(), + arguments: z.record(z.any()).optional().default({}), + workflowId: z.string().optional(), +}) + +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + + try { + const session = await getSession() + if (!session?.user?.id) { + return createUnauthorizedResponse() + } + + const userId = session.user.id + const body = await req.json() + + try { + const preview = JSON.stringify(body).slice(0, 300) + logger.debug(`[${tracker.requestId}] Incoming execute-tool request`, { preview }) + } catch {} + + const { toolCallId, toolName, arguments: toolArgs, workflowId } = ExecuteToolSchema.parse(body) + + const resolvedToolName = resolveToolId(toolName) + + logger.info(`[${tracker.requestId}] Executing tool`, { + toolCallId, + toolName, + resolvedToolName, + workflowId, + hasArgs: Object.keys(toolArgs).length > 0, + }) + + const toolConfig = getTool(resolvedToolName) + if (!toolConfig) { + // Find similar tool names to help debug + const { tools: allTools } = await import('@/tools/registry') + const allToolNames = Object.keys(allTools) + const prefix = toolName.split('_').slice(0, 2).join('_') + const similarTools = allToolNames + .filter((name) => name.startsWith(`${prefix.split('_')[0]}_`)) + .slice(0, 10) + + logger.warn(`[${tracker.requestId}] Tool not found in registry`, { + toolName, + prefix, + similarTools, + totalToolsInRegistry: allToolNames.length, + }) + return NextResponse.json( + { + success: false, + error: `Tool not found: ${toolName}. Similar tools: ${similarTools.join(', ')}`, + toolCallId, + }, + { status: 404 } + ) + } + + // Get the workspaceId from the workflow (env vars are stored at workspace level) + let workspaceId: string | undefined + if (workflowId) { + const workflowResult = await db + .select({ workspaceId: workflow.workspaceId }) + .from(workflow) + .where(eq(workflow.id, workflowId)) + .limit(1) + workspaceId = workflowResult[0]?.workspaceId ?? 
undefined + } + + // Get decrypted environment variables early so we can resolve all {{VAR}} references + const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId) + + logger.info(`[${tracker.requestId}] Fetched environment variables`, { + workflowId, + workspaceId, + envVarCount: Object.keys(decryptedEnvVars).length, + envVarKeys: Object.keys(decryptedEnvVars), + }) + + // Build execution params starting with LLM-provided arguments + // Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects) + const executionParams: Record = resolveEnvVarReferences( + toolArgs, + decryptedEnvVars, + { deep: true } + ) as Record + + logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, { + toolName, + originalArgKeys: Object.keys(toolArgs), + resolvedArgKeys: Object.keys(executionParams), + }) + + // Resolve OAuth access token if required + if (toolConfig.oauth?.required && toolConfig.oauth.provider) { + const provider = toolConfig.oauth.provider + logger.info(`[${tracker.requestId}] Resolving OAuth token`, { provider }) + + try { + // Find the account for this provider and user + const accounts = await db + .select() + .from(account) + .where(and(eq(account.providerId, provider), eq(account.userId, userId))) + .limit(1) + + if (accounts.length > 0) { + const acc = accounts[0] + const requestId = generateRequestId() + const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id) + + if (accessToken) { + executionParams.accessToken = accessToken + logger.info(`[${tracker.requestId}] OAuth token resolved`, { provider }) + } else { + logger.warn(`[${tracker.requestId}] No access token available`, { provider }) + return NextResponse.json( + { + success: false, + error: `OAuth token not available for ${provider}. Please reconnect your account.`, + toolCallId, + }, + { status: 400 } + ) + } + } else { + logger.warn(`[${tracker.requestId}] No account found for provider`, { provider }) + return NextResponse.json( + { + success: false, + error: `No ${provider} account connected. Please connect your account first.`, + toolCallId, + }, + { status: 400 } + ) + } + } catch (error) { + logger.error(`[${tracker.requestId}] Failed to resolve OAuth token`, { + provider, + error: error instanceof Error ? error.message : String(error), + }) + return NextResponse.json( + { + success: false, + error: `Failed to get OAuth token for ${provider}`, + toolCallId, + }, + { status: 500 } + ) + } + } + + // Check if tool requires an API key that wasn't resolved via {{ENV_VAR}} reference + const needsApiKey = toolConfig.params?.apiKey?.required + + if (needsApiKey && !executionParams.apiKey) { + logger.warn(`[${tracker.requestId}] No API key found for tool`, { toolName }) + return NextResponse.json( + { + success: false, + error: `API key not provided for ${toolName}. 
Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`, + toolCallId, + }, + { status: 400 } + ) + } + + // Add execution context + executionParams._context = { + workflowId, + userId, + } + + // Special handling for function_execute - inject environment variables + if (toolName === 'function_execute') { + executionParams.envVars = decryptedEnvVars + executionParams.workflowVariables = {} // No workflow variables in copilot context + executionParams.blockData = {} // No block data in copilot context + executionParams.blockNameMapping = {} // No block mapping in copilot context + executionParams.language = executionParams.language || 'javascript' + executionParams.timeout = executionParams.timeout || 30000 + + logger.info(`[${tracker.requestId}] Injected env vars for function_execute`, { + envVarCount: Object.keys(decryptedEnvVars).length, + }) + } + + // Execute the tool + logger.info(`[${tracker.requestId}] Executing tool with resolved credentials`, { + toolName, + hasAccessToken: !!executionParams.accessToken, + hasApiKey: !!executionParams.apiKey, + }) + + const result = await executeTool(resolvedToolName, executionParams) + + logger.info(`[${tracker.requestId}] Tool execution complete`, { + toolName, + success: result.success, + hasOutput: !!result.output, + }) + + return NextResponse.json({ + success: true, + toolCallId, + result: { + success: result.success, + output: result.output, + error: result.error, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues }) + return createBadRequestResponse('Invalid request body for execute-tool') + } + logger.error(`[${tracker.requestId}] Failed to execute tool:`, error) + const errorMessage = error instanceof Error ? 
error.message : 'Failed to execute tool' + return createInternalServerErrorResponse(errorMessage) + } +} diff --git a/.tmp_old_get_examples_rag.ts b/.tmp_old_get_examples_rag.ts new file mode 100644 index 0000000000..258330e0e9 --- /dev/null +++ b/.tmp_old_get_examples_rag.ts @@ -0,0 +1,52 @@ +import { Loader2, MinusCircle, Search, XCircle } from 'lucide-react' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' + +export class GetExamplesRagClientTool extends BaseClientTool { + static readonly id = 'get_examples_rag' + + constructor(toolCallId: string) { + super(toolCallId, GetExamplesRagClientTool.id, GetExamplesRagClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 }, + [ClientToolCallState.pending]: { text: 'Fetching examples', icon: Loader2 }, + [ClientToolCallState.executing]: { text: 'Fetching examples', icon: Loader2 }, + [ClientToolCallState.success]: { text: 'Fetched examples', icon: Search }, + [ClientToolCallState.error]: { text: 'Failed to fetch examples', icon: XCircle }, + [ClientToolCallState.aborted]: { text: 'Aborted getting examples', icon: MinusCircle }, + [ClientToolCallState.rejected]: { text: 'Skipped getting examples', icon: MinusCircle }, + }, + interrupt: undefined, + getDynamicText: (params, state) => { + if (params?.query && typeof params.query === 'string') { + const query = params.query + + switch (state) { + case ClientToolCallState.success: + return `Found examples for ${query}` + case ClientToolCallState.executing: + case ClientToolCallState.generating: + case ClientToolCallState.pending: + return `Searching examples for ${query}` + case ClientToolCallState.error: + return `Failed to find examples for ${query}` + case ClientToolCallState.aborted: + return `Aborted searching examples for ${query}` + case ClientToolCallState.rejected: + return `Skipped searching examples for ${query}` + } + } + return undefined + }, + } + + async execute(): Promise { + return + } +} diff --git a/.tmp_old_get_operations_examples.ts b/.tmp_old_get_operations_examples.ts new file mode 100644 index 0000000000..4a14b71ef8 --- /dev/null +++ b/.tmp_old_get_operations_examples.ts @@ -0,0 +1,58 @@ +import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' + +export class GetOperationsExamplesClientTool extends BaseClientTool { + static readonly id = 'get_operations_examples' + + constructor(toolCallId: string) { + super(toolCallId, GetOperationsExamplesClientTool.id, GetOperationsExamplesClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { text: 'Designing workflow component', icon: Loader2 }, + [ClientToolCallState.pending]: { text: 'Designing workflow component', icon: Loader2 }, + [ClientToolCallState.executing]: { text: 'Designing workflow component', icon: Loader2 }, + [ClientToolCallState.success]: { text: 'Designed workflow component', icon: Zap }, + [ClientToolCallState.error]: { text: 'Failed to design workflow component', icon: XCircle }, + [ClientToolCallState.aborted]: { + text: 'Aborted designing workflow component', + icon: MinusCircle, + }, + [ClientToolCallState.rejected]: { + text: 'Skipped designing workflow component', + icon: MinusCircle, + }, + 
}, + interrupt: undefined, + getDynamicText: (params, state) => { + if (params?.query && typeof params.query === 'string') { + const query = params.query + + switch (state) { + case ClientToolCallState.success: + return `Designed ${query}` + case ClientToolCallState.executing: + case ClientToolCallState.generating: + case ClientToolCallState.pending: + return `Designing ${query}` + case ClientToolCallState.error: + return `Failed to design ${query}` + case ClientToolCallState.aborted: + return `Aborted designing ${query}` + case ClientToolCallState.rejected: + return `Skipped designing ${query}` + } + } + return undefined + }, + } + + async execute(): Promise { + return + } +} diff --git a/.tmp_old_get_trigger_examples.ts b/.tmp_old_get_trigger_examples.ts new file mode 100644 index 0000000000..f24ea48017 --- /dev/null +++ b/.tmp_old_get_trigger_examples.ts @@ -0,0 +1,31 @@ +import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' + +export class GetTriggerExamplesClientTool extends BaseClientTool { + static readonly id = 'get_trigger_examples' + + constructor(toolCallId: string) { + super(toolCallId, GetTriggerExamplesClientTool.id, GetTriggerExamplesClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { text: 'Selecting a trigger', icon: Loader2 }, + [ClientToolCallState.pending]: { text: 'Selecting a trigger', icon: Loader2 }, + [ClientToolCallState.executing]: { text: 'Selecting a trigger', icon: Loader2 }, + [ClientToolCallState.success]: { text: 'Selected a trigger', icon: Zap }, + [ClientToolCallState.error]: { text: 'Failed to select a trigger', icon: XCircle }, + [ClientToolCallState.aborted]: { text: 'Aborted selecting a trigger', icon: MinusCircle }, + [ClientToolCallState.rejected]: { text: 'Skipped selecting a trigger', icon: MinusCircle }, + }, + interrupt: undefined, + } + + async execute(): Promise { + return + } +} diff --git a/.tmp_old_init_tool_configs.ts b/.tmp_old_init_tool_configs.ts new file mode 100644 index 0000000000..336fdbb0c5 --- /dev/null +++ b/.tmp_old_init_tool_configs.ts @@ -0,0 +1,36 @@ +/** + * Initialize all tool UI configurations. + * + * This module imports all client tools to trigger their UI config registration. + * Import this module early in the app to ensure all tool configs are available. 
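+ *
+ * @example
+ * // Hedged sketch: the import path below is illustrative only, not taken from this patch.
+ * // A bare side-effect import is enough, since each imported tool file registers its own UI config:
+ * import '@/lib/copilot/tools/client/init-tool-configs'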
+ */ + +// Other tools (subagents) +import './other/auth' +import './other/custom-tool' +import './other/debug' +import './other/deploy' +import './other/edit' +import './other/evaluate' +import './other/info' +import './other/knowledge' +import './other/make-api-request' +import './other/plan' +import './other/research' +import './other/sleep' +import './other/superagent' +import './other/test' +import './other/tour' +import './other/workflow' + +// Workflow tools +import './workflow/deploy-api' +import './workflow/deploy-chat' +import './workflow/deploy-mcp' +import './workflow/edit-workflow' +import './workflow/redeploy' +import './workflow/run-workflow' +import './workflow/set-global-workflow-variables' + +// User tools +import './user/set-environment-variables' diff --git a/.tmp_old_manage_custom_tool_client.ts b/.tmp_old_manage_custom_tool_client.ts new file mode 100644 index 0000000000..58a8236376 --- /dev/null +++ b/.tmp_old_manage_custom_tool_client.ts @@ -0,0 +1,408 @@ +import { createLogger } from '@sim/logger' +import { Check, Loader2, Plus, X, XCircle } from 'lucide-react' +import { client } from '@/lib/auth/auth-client' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' +import { getCustomTool } from '@/hooks/queries/custom-tools' +import { useCopilotStore } from '@/stores/panel/copilot/store' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' + +interface CustomToolSchema { + type: 'function' + function: { + name: string + description?: string + parameters: { + type: string + properties: Record + required?: string[] + } + } +} + +interface ManageCustomToolArgs { + operation: 'add' | 'edit' | 'delete' | 'list' + toolId?: string + schema?: CustomToolSchema + code?: string +} + +const API_ENDPOINT = '/api/tools/custom' + +async function checkCustomToolsPermission(): Promise { + const activeOrgResponse = await client.organization.getFullOrganization() + const organizationId = activeOrgResponse.data?.id + if (!organizationId) return + + const response = await fetch(`/api/permission-groups/user?organizationId=${organizationId}`) + if (!response.ok) return + + const data = await response.json() + if (data?.config?.disableCustomTools) { + throw new Error('Custom tools are not allowed based on your permission group settings') + } +} + +/** + * Client tool for creating, editing, and deleting custom tools via the copilot. 
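+ *
+ * Supported operations are 'add' | 'edit' | 'delete' | 'list' (see ManageCustomToolArgs).
+ * 'edit' and 'delete' surface the Allow/Skip confirmation via getInterruptDisplays(), while
+ * 'add' and 'list' run directly from execute().
+ *
+ * @example
+ * // Hedged usage sketch; toolCallId, schema and code are supplied by the copilot at runtime:
+ * const tool = new ManageCustomToolClientTool(toolCallId)
+ * await tool.execute({ operation: 'add', schema, code })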
+ */ +export class ManageCustomToolClientTool extends BaseClientTool { + static readonly id = 'manage_custom_tool' + private currentArgs?: ManageCustomToolArgs + + constructor(toolCallId: string) { + super(toolCallId, ManageCustomToolClientTool.id, ManageCustomToolClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { + text: 'Managing custom tool', + icon: Loader2, + }, + [ClientToolCallState.pending]: { text: 'Manage custom tool?', icon: Plus }, + [ClientToolCallState.executing]: { text: 'Managing custom tool', icon: Loader2 }, + [ClientToolCallState.success]: { text: 'Managed custom tool', icon: Check }, + [ClientToolCallState.error]: { text: 'Failed to manage custom tool', icon: X }, + [ClientToolCallState.aborted]: { + text: 'Aborted managing custom tool', + icon: XCircle, + }, + [ClientToolCallState.rejected]: { + text: 'Skipped managing custom tool', + icon: XCircle, + }, + }, + interrupt: { + accept: { text: 'Allow', icon: Check }, + reject: { text: 'Skip', icon: XCircle }, + }, + getDynamicText: (params, state) => { + const operation = params?.operation as 'add' | 'edit' | 'delete' | 'list' | undefined + + if (!operation) return undefined + + let toolName = params?.schema?.function?.name + if (!toolName && params?.toolId) { + try { + const tool = getCustomTool(params.toolId) + toolName = tool?.schema?.function?.name + } catch { + // Ignore errors accessing cache + } + } + + const getActionText = (verb: 'present' | 'past' | 'gerund') => { + switch (operation) { + case 'add': + return verb === 'present' ? 'Create' : verb === 'past' ? 'Created' : 'Creating' + case 'edit': + return verb === 'present' ? 'Edit' : verb === 'past' ? 'Edited' : 'Editing' + case 'delete': + return verb === 'present' ? 'Delete' : verb === 'past' ? 'Deleted' : 'Deleting' + case 'list': + return verb === 'present' ? 'List' : verb === 'past' ? 'Listed' : 'Listing' + default: + return verb === 'present' ? 'Manage' : verb === 'past' ? 'Managed' : 'Managing' + } + } + + // For add: only show tool name in past tense (success) + // For edit/delete: always show tool name + // For list: never show individual tool name, use plural + const shouldShowToolName = (currentState: ClientToolCallState) => { + if (operation === 'list') return false + if (operation === 'add') { + return currentState === ClientToolCallState.success + } + return true // edit and delete always show tool name + } + + const nameText = + operation === 'list' + ? ' custom tools' + : shouldShowToolName(state) && toolName + ? 
` ${toolName}` + : ' custom tool' + + switch (state) { + case ClientToolCallState.success: + return `${getActionText('past')}${nameText}` + case ClientToolCallState.executing: + return `${getActionText('gerund')}${nameText}` + case ClientToolCallState.generating: + return `${getActionText('gerund')}${nameText}` + case ClientToolCallState.pending: + return `${getActionText('present')}${nameText}?` + case ClientToolCallState.error: + return `Failed to ${getActionText('present')?.toLowerCase()}${nameText}` + case ClientToolCallState.aborted: + return `Aborted ${getActionText('gerund')?.toLowerCase()}${nameText}` + case ClientToolCallState.rejected: + return `Skipped ${getActionText('gerund')?.toLowerCase()}${nameText}` + } + return undefined + }, + } + + /** + * Gets the tool call args from the copilot store (needed before execute() is called) + */ + private getArgsFromStore(): ManageCustomToolArgs | undefined { + try { + const { toolCallsById } = useCopilotStore.getState() + const toolCall = toolCallsById[this.toolCallId] + return (toolCall as any)?.params as ManageCustomToolArgs | undefined + } catch { + return undefined + } + } + + /** + * Override getInterruptDisplays to only show confirmation for edit and delete operations. + * Add operations execute directly without confirmation. + */ + getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined { + const args = this.currentArgs || this.getArgsFromStore() + const operation = args?.operation + if (operation === 'edit' || operation === 'delete') { + return this.metadata.interrupt + } + return undefined + } + + async handleReject(): Promise { + await super.handleReject() + this.setState(ClientToolCallState.rejected) + } + + async handleAccept(args?: ManageCustomToolArgs): Promise { + const logger = createLogger('ManageCustomToolClientTool') + try { + this.setState(ClientToolCallState.executing) + await this.executeOperation(args, logger) + } catch (e: any) { + logger.error('execute failed', { message: e?.message }) + this.setState(ClientToolCallState.error) + await this.markToolComplete(500, e?.message || 'Failed to manage custom tool', { + success: false, + error: e?.message || 'Failed to manage custom tool', + }) + } + } + + async execute(args?: ManageCustomToolArgs): Promise { + this.currentArgs = args + if (args?.operation === 'add' || args?.operation === 'list') { + await this.handleAccept(args) + } + } + + /** + * Executes the custom tool operation (add, edit, delete, or list) + */ + private async executeOperation( + args: ManageCustomToolArgs | undefined, + logger: ReturnType + ): Promise { + if (!args?.operation) { + throw new Error('Operation is required') + } + + await checkCustomToolsPermission() + + const { operation, toolId, schema, code } = args + + const { hydration } = useWorkflowRegistry.getState() + const workspaceId = hydration.workspaceId + if (!workspaceId) { + throw new Error('No active workspace found') + } + + logger.info(`Executing custom tool operation: ${operation}`, { + operation, + toolId, + functionName: schema?.function?.name, + workspaceId, + }) + + switch (operation) { + case 'add': + await this.addCustomTool({ schema, code, workspaceId }, logger) + break + case 'edit': + await this.editCustomTool({ toolId, schema, code, workspaceId }, logger) + break + case 'delete': + await this.deleteCustomTool({ toolId, workspaceId }, logger) + break + case 'list': + await this.markToolComplete(200, 'Listed custom tools') + break + default: + throw new Error(`Unknown operation: ${operation}`) + } + } + + /** 
+ * Creates a new custom tool + */ + private async addCustomTool( + params: { + schema?: CustomToolSchema + code?: string + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { schema, code, workspaceId } = params + + if (!schema) { + throw new Error('Schema is required for adding a custom tool') + } + if (!code) { + throw new Error('Code is required for adding a custom tool') + } + + const functionName = schema.function.name + + const response = await fetch(API_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + tools: [{ title: functionName, schema, code }], + workspaceId, + }), + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to create custom tool') + } + + if (!data.data || !Array.isArray(data.data) || data.data.length === 0) { + throw new Error('Invalid API response: missing tool data') + } + + const createdTool = data.data[0] + logger.info(`Created custom tool: ${functionName}`, { toolId: createdTool.id }) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Created custom tool "${functionName}"`, { + success: true, + operation: 'add', + toolId: createdTool.id, + functionName, + }) + } + + /** + * Updates an existing custom tool + */ + private async editCustomTool( + params: { + toolId?: string + schema?: CustomToolSchema + code?: string + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { toolId, schema, code, workspaceId } = params + + if (!toolId) { + throw new Error('Tool ID is required for editing a custom tool') + } + + if (!schema && !code) { + throw new Error('At least one of schema or code must be provided for editing') + } + + const existingResponse = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`) + const existingData = await existingResponse.json() + + if (!existingResponse.ok) { + throw new Error(existingData.error || 'Failed to fetch existing tools') + } + + const existingTool = existingData.data?.find((t: any) => t.id === toolId) + if (!existingTool) { + throw new Error(`Tool with ID ${toolId} not found`) + } + + const mergedSchema = schema ?? existingTool.schema + const updatedTool = { + id: toolId, + title: mergedSchema.function.name, + schema: mergedSchema, + code: code ?? 
existingTool.code, + } + + const response = await fetch(API_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + tools: [updatedTool], + workspaceId, + }), + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to update custom tool') + } + + const functionName = updatedTool.schema.function.name + logger.info(`Updated custom tool: ${functionName}`, { toolId }) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Updated custom tool "${functionName}"`, { + success: true, + operation: 'edit', + toolId, + functionName, + }) + } + + /** + * Deletes a custom tool + */ + private async deleteCustomTool( + params: { + toolId?: string + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { toolId, workspaceId } = params + + if (!toolId) { + throw new Error('Tool ID is required for deleting a custom tool') + } + + const url = `${API_ENDPOINT}?id=${toolId}&workspaceId=${workspaceId}` + const response = await fetch(url, { + method: 'DELETE', + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to delete custom tool') + } + + logger.info(`Deleted custom tool: ${toolId}`) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Deleted custom tool`, { + success: true, + operation: 'delete', + toolId, + }) + } +} diff --git a/.tmp_old_manage_mcp_tool_client.ts b/.tmp_old_manage_mcp_tool_client.ts new file mode 100644 index 0000000000..796574dc1b --- /dev/null +++ b/.tmp_old_manage_mcp_tool_client.ts @@ -0,0 +1,360 @@ +import { createLogger } from '@sim/logger' +import { Check, Loader2, Server, X, XCircle } from 'lucide-react' +import { client } from '@/lib/auth/auth-client' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' +import { useCopilotStore } from '@/stores/panel/copilot/store' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' + +interface McpServerConfig { + name: string + transport: 'streamable-http' + url?: string + headers?: Record + timeout?: number + enabled?: boolean +} + +interface ManageMcpToolArgs { + operation: 'add' | 'edit' | 'delete' + serverId?: string + config?: McpServerConfig +} + +const API_ENDPOINT = '/api/mcp/servers' + +async function checkMcpToolsPermission(): Promise { + const activeOrgResponse = await client.organization.getFullOrganization() + const organizationId = activeOrgResponse.data?.id + if (!organizationId) return + + const response = await fetch(`/api/permission-groups/user?organizationId=${organizationId}`) + if (!response.ok) return + + const data = await response.json() + if (data?.config?.disableMcpTools) { + throw new Error('MCP tools are not allowed based on your permission group settings') + } +} + +/** + * Client tool for creating, editing, and deleting MCP tool servers via the copilot. 
+ */ +export class ManageMcpToolClientTool extends BaseClientTool { + static readonly id = 'manage_mcp_tool' + private currentArgs?: ManageMcpToolArgs + + constructor(toolCallId: string) { + super(toolCallId, ManageMcpToolClientTool.id, ManageMcpToolClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { + text: 'Managing MCP tool', + icon: Loader2, + }, + [ClientToolCallState.pending]: { text: 'Manage MCP tool?', icon: Server }, + [ClientToolCallState.executing]: { text: 'Managing MCP tool', icon: Loader2 }, + [ClientToolCallState.success]: { text: 'Managed MCP tool', icon: Check }, + [ClientToolCallState.error]: { text: 'Failed to manage MCP tool', icon: X }, + [ClientToolCallState.aborted]: { + text: 'Aborted managing MCP tool', + icon: XCircle, + }, + [ClientToolCallState.rejected]: { + text: 'Skipped managing MCP tool', + icon: XCircle, + }, + }, + interrupt: { + accept: { text: 'Allow', icon: Check }, + reject: { text: 'Skip', icon: XCircle }, + }, + getDynamicText: (params, state) => { + const operation = params?.operation as 'add' | 'edit' | 'delete' | undefined + + if (!operation) return undefined + + const serverName = params?.config?.name || params?.serverName + + const getActionText = (verb: 'present' | 'past' | 'gerund') => { + switch (operation) { + case 'add': + return verb === 'present' ? 'Add' : verb === 'past' ? 'Added' : 'Adding' + case 'edit': + return verb === 'present' ? 'Edit' : verb === 'past' ? 'Edited' : 'Editing' + case 'delete': + return verb === 'present' ? 'Delete' : verb === 'past' ? 'Deleted' : 'Deleting' + } + } + + const shouldShowServerName = (currentState: ClientToolCallState) => { + if (operation === 'add') { + return currentState === ClientToolCallState.success + } + return true + } + + const nameText = shouldShowServerName(state) && serverName ? ` ${serverName}` : ' MCP tool' + + switch (state) { + case ClientToolCallState.success: + return `${getActionText('past')}${nameText}` + case ClientToolCallState.executing: + return `${getActionText('gerund')}${nameText}` + case ClientToolCallState.generating: + return `${getActionText('gerund')}${nameText}` + case ClientToolCallState.pending: + return `${getActionText('present')}${nameText}?` + case ClientToolCallState.error: + return `Failed to ${getActionText('present')?.toLowerCase()}${nameText}` + case ClientToolCallState.aborted: + return `Aborted ${getActionText('gerund')?.toLowerCase()}${nameText}` + case ClientToolCallState.rejected: + return `Skipped ${getActionText('gerund')?.toLowerCase()}${nameText}` + } + return undefined + }, + } + + /** + * Gets the tool call args from the copilot store (needed before execute() is called) + */ + private getArgsFromStore(): ManageMcpToolArgs | undefined { + try { + const { toolCallsById } = useCopilotStore.getState() + const toolCall = toolCallsById[this.toolCallId] + return (toolCall as any)?.params as ManageMcpToolArgs | undefined + } catch { + return undefined + } + } + + /** + * Override getInterruptDisplays to only show confirmation for edit and delete operations. + * Add operations execute directly without confirmation. 
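+ *
+ * For example, { operation: 'delete' } returns the Allow/Skip interrupt from the static
+ * metadata, while { operation: 'add' } returns undefined so no confirmation step is shown.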
+ */ + getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined { + const args = this.currentArgs || this.getArgsFromStore() + const operation = args?.operation + if (operation === 'edit' || operation === 'delete') { + return this.metadata.interrupt + } + return undefined + } + + async handleReject(): Promise { + await super.handleReject() + this.setState(ClientToolCallState.rejected) + } + + async handleAccept(args?: ManageMcpToolArgs): Promise { + const logger = createLogger('ManageMcpToolClientTool') + try { + this.setState(ClientToolCallState.executing) + await this.executeOperation(args, logger) + } catch (e: any) { + logger.error('execute failed', { message: e?.message }) + this.setState(ClientToolCallState.error) + await this.markToolComplete(500, e?.message || 'Failed to manage MCP tool', { + success: false, + error: e?.message || 'Failed to manage MCP tool', + }) + } + } + + async execute(args?: ManageMcpToolArgs): Promise { + this.currentArgs = args + if (args?.operation === 'add') { + await this.handleAccept(args) + } + } + + /** + * Executes the MCP tool operation (add, edit, or delete) + */ + private async executeOperation( + args: ManageMcpToolArgs | undefined, + logger: ReturnType + ): Promise { + if (!args?.operation) { + throw new Error('Operation is required') + } + + await checkMcpToolsPermission() + + const { operation, serverId, config } = args + + const { hydration } = useWorkflowRegistry.getState() + const workspaceId = hydration.workspaceId + if (!workspaceId) { + throw new Error('No active workspace found') + } + + logger.info(`Executing MCP tool operation: ${operation}`, { + operation, + serverId, + serverName: config?.name, + workspaceId, + }) + + switch (operation) { + case 'add': + await this.addMcpServer({ config, workspaceId }, logger) + break + case 'edit': + await this.editMcpServer({ serverId, config, workspaceId }, logger) + break + case 'delete': + await this.deleteMcpServer({ serverId, workspaceId }, logger) + break + default: + throw new Error(`Unknown operation: ${operation}`) + } + } + + /** + * Creates a new MCP server + */ + private async addMcpServer( + params: { + config?: McpServerConfig + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { config, workspaceId } = params + + if (!config) { + throw new Error('Config is required for adding an MCP tool') + } + if (!config.name) { + throw new Error('Server name is required') + } + if (!config.url) { + throw new Error('Server URL is required for streamable-http transport') + } + + const serverData = { + ...config, + workspaceId, + transport: config.transport || 'streamable-http', + timeout: config.timeout || 30000, + enabled: config.enabled !== false, + } + + const response = await fetch(API_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(serverData), + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to create MCP tool') + } + + const serverId = data.data?.serverId + logger.info(`Created MCP tool: ${config.name}`, { serverId }) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Created MCP tool "${config.name}"`, { + success: true, + operation: 'add', + serverId, + serverName: config.name, + }) + } + + /** + * Updates an existing MCP server + */ + private async editMcpServer( + params: { + serverId?: string + config?: McpServerConfig + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { serverId, config, 
workspaceId } = params + + if (!serverId) { + throw new Error('Server ID is required for editing an MCP tool') + } + + if (!config) { + throw new Error('Config is required for editing an MCP tool') + } + + const updateData = { + ...config, + workspaceId, + } + + const response = await fetch(`${API_ENDPOINT}/${serverId}?workspaceId=${workspaceId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updateData), + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to update MCP tool') + } + + const serverName = config.name || data.data?.server?.name || serverId + logger.info(`Updated MCP tool: ${serverName}`, { serverId }) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Updated MCP tool "${serverName}"`, { + success: true, + operation: 'edit', + serverId, + serverName, + }) + } + + /** + * Deletes an MCP server + */ + private async deleteMcpServer( + params: { + serverId?: string + workspaceId: string + }, + logger: ReturnType + ): Promise { + const { serverId, workspaceId } = params + + if (!serverId) { + throw new Error('Server ID is required for deleting an MCP tool') + } + + const url = `${API_ENDPOINT}?serverId=${serverId}&workspaceId=${workspaceId}` + const response = await fetch(url, { + method: 'DELETE', + }) + + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to delete MCP tool') + } + + logger.info(`Deleted MCP tool: ${serverId}`) + + this.setState(ClientToolCallState.success) + await this.markToolComplete(200, `Deleted MCP tool`, { + success: true, + operation: 'delete', + serverId, + }) + } +} diff --git a/.tmp_old_oauth_request_access_client.ts b/.tmp_old_oauth_request_access_client.ts new file mode 100644 index 0000000000..725f73bc72 --- /dev/null +++ b/.tmp_old_oauth_request_access_client.ts @@ -0,0 +1,174 @@ +import { createLogger } from '@sim/logger' +import { CheckCircle, Loader2, MinusCircle, PlugZap, X, XCircle } from 'lucide-react' +import { + BaseClientTool, + type BaseClientToolMetadata, + ClientToolCallState, +} from '@/lib/copilot/tools/client/base-tool' +import { OAUTH_PROVIDERS, type OAuthServiceConfig } from '@/lib/oauth' + +const logger = createLogger('OAuthRequestAccessClientTool') + +interface OAuthRequestAccessArgs { + providerName?: string +} + +interface ResolvedServiceInfo { + serviceId: string + providerId: string + service: OAuthServiceConfig +} + +/** + * Finds the service configuration from a provider name. + * The providerName should match the exact `name` field returned by get_credentials tool's notConnected services. 
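+ *
+ * Matching is case-insensitive: an exact match against each service's `name` is tried first,
+ * then a substring match in either direction as a fallback.
+ *
+ * @example
+ * // Hedged sketch; 'google sheets' is a hypothetical service name, not taken from this patch:
+ * const resolved = findServiceByName('google sheets')
+ * // resolved?.serviceId, resolved?.providerId and resolved?.service.scopes feed the
+ * // 'open-oauth-connect' event dispatched in handleAccept().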
+ */ +function findServiceByName(providerName: string): ResolvedServiceInfo | null { + const normalizedName = providerName.toLowerCase().trim() + + // First pass: exact match (case-insensitive) + for (const [, providerConfig] of Object.entries(OAUTH_PROVIDERS)) { + for (const [serviceId, service] of Object.entries(providerConfig.services)) { + if (service.name.toLowerCase() === normalizedName) { + return { serviceId, providerId: service.providerId, service } + } + } + } + + // Second pass: partial match as fallback for flexibility + for (const [, providerConfig] of Object.entries(OAUTH_PROVIDERS)) { + for (const [serviceId, service] of Object.entries(providerConfig.services)) { + if ( + service.name.toLowerCase().includes(normalizedName) || + normalizedName.includes(service.name.toLowerCase()) + ) { + return { serviceId, providerId: service.providerId, service } + } + } + } + + return null +} + +export interface OAuthConnectEventDetail { + providerName: string + serviceId: string + providerId: string + requiredScopes: string[] + newScopes?: string[] +} + +export class OAuthRequestAccessClientTool extends BaseClientTool { + static readonly id = 'oauth_request_access' + + private providerName?: string + + constructor(toolCallId: string) { + super(toolCallId, OAuthRequestAccessClientTool.id, OAuthRequestAccessClientTool.metadata) + } + + static readonly metadata: BaseClientToolMetadata = { + displayNames: { + [ClientToolCallState.generating]: { text: 'Requesting integration access', icon: Loader2 }, + [ClientToolCallState.pending]: { text: 'Requesting integration access', icon: Loader2 }, + [ClientToolCallState.executing]: { text: 'Requesting integration access', icon: Loader2 }, + [ClientToolCallState.rejected]: { text: 'Skipped integration access', icon: MinusCircle }, + [ClientToolCallState.success]: { text: 'Requested integration access', icon: CheckCircle }, + [ClientToolCallState.error]: { text: 'Failed to request integration access', icon: X }, + [ClientToolCallState.aborted]: { text: 'Aborted integration access request', icon: XCircle }, + }, + interrupt: { + accept: { text: 'Connect', icon: PlugZap }, + reject: { text: 'Skip', icon: MinusCircle }, + }, + getDynamicText: (params, state) => { + if (params.providerName) { + const name = params.providerName + switch (state) { + case ClientToolCallState.generating: + case ClientToolCallState.pending: + case ClientToolCallState.executing: + return `Requesting ${name} access` + case ClientToolCallState.rejected: + return `Skipped ${name} access` + case ClientToolCallState.success: + return `Requested ${name} access` + case ClientToolCallState.error: + return `Failed to request ${name} access` + case ClientToolCallState.aborted: + return `Aborted ${name} access request` + } + } + return undefined + }, + } + + async handleAccept(args?: OAuthRequestAccessArgs): Promise { + try { + if (args?.providerName) { + this.providerName = args.providerName + } + + if (!this.providerName) { + logger.error('No provider name provided') + this.setState(ClientToolCallState.error) + await this.markToolComplete(400, 'No provider name specified') + return + } + + // Find the service by name + const serviceInfo = findServiceByName(this.providerName) + if (!serviceInfo) { + logger.error('Could not find OAuth service for provider', { + providerName: this.providerName, + }) + this.setState(ClientToolCallState.error) + await this.markToolComplete(400, `Unknown provider: ${this.providerName}`) + return + } + + const { serviceId, providerId, service } = serviceInfo + + 
logger.info('Opening OAuth connect modal', { + providerName: this.providerName, + serviceId, + providerId, + }) + + // Move to executing state + this.setState(ClientToolCallState.executing) + + // Dispatch event to open the OAuth modal (same pattern as open-settings) + window.dispatchEvent( + new CustomEvent('open-oauth-connect', { + detail: { + providerName: this.providerName, + serviceId, + providerId, + requiredScopes: service.scopes || [], + }, + }) + ) + + // Mark as success - the user opened the prompt, but connection is not guaranteed + this.setState(ClientToolCallState.success) + await this.markToolComplete( + 200, + `The user opened the ${this.providerName} connection prompt and may have connected. Check the connected integrations to verify the connection status.` + ) + } catch (e) { + logger.error('Failed to open OAuth connect modal', { error: e }) + this.setState(ClientToolCallState.error) + await this.markToolComplete(500, 'Failed to open OAuth connection dialog') + } + } + + async handleReject(): Promise { + await super.handleReject() + this.setState(ClientToolCallState.rejected) + } + + async execute(args?: OAuthRequestAccessArgs): Promise { + await this.handleAccept(args) + } +} diff --git a/.tmp_old_process_contents.ts b/.tmp_old_process_contents.ts new file mode 100644 index 0000000000..13a0015f04 --- /dev/null +++ b/.tmp_old_process_contents.ts @@ -0,0 +1,555 @@ +import { db } from '@sim/db' +import { copilotChats, document, knowledgeBase, templates } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' +import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer' +import { isHiddenFromDisplay } from '@/blocks/types' +import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check' +import { escapeRegExp } from '@/executor/constants' +import type { ChatContext } from '@/stores/panel/copilot/types' + +export type AgentContextType = + | 'past_chat' + | 'workflow' + | 'current_workflow' + | 'blocks' + | 'logs' + | 'knowledge' + | 'templates' + | 'workflow_block' + | 'docs' + +export interface AgentContext { + type: AgentContextType + tag: string + content: string +} + +const logger = createLogger('ProcessContents') + +export async function processContexts( + contexts: ChatContext[] | undefined +): Promise { + if (!Array.isArray(contexts) || contexts.length === 0) return [] + const tasks = contexts.map(async (ctx) => { + try { + if (ctx.kind === 'past_chat') { + return await processPastChatViaApi(ctx.chatId, ctx.label ? `@${ctx.label}` : '@') + } + if ((ctx.kind === 'workflow' || ctx.kind === 'current_workflow') && ctx.workflowId) { + return await processWorkflowFromDb( + ctx.workflowId, + ctx.label ? `@${ctx.label}` : '@', + ctx.kind + ) + } + if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) { + return await processKnowledgeFromDb( + (ctx as any).knowledgeId, + ctx.label ? `@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'blocks' && (ctx as any).blockId) { + return await processBlockMetadata((ctx as any).blockId, ctx.label ? `@${ctx.label}` : '@') + } + if (ctx.kind === 'templates' && (ctx as any).templateId) { + return await processTemplateFromDb( + (ctx as any).templateId, + ctx.label ? `@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'logs' && (ctx as any).executionId) { + return await processExecutionLogFromDb( + (ctx as any).executionId, + ctx.label ? 
`@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) { + return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label) + } + // Other kinds can be added here: workflow, blocks, logs, knowledge, templates, docs + return null + } catch (error) { + logger.error('Failed processing context', { ctx, error }) + return null + } + }) + + const results = await Promise.all(tasks) + return results.filter((r): r is AgentContext => !!r) as AgentContext[] +} + +// Server-side variant (recommended for use in API routes) +export async function processContextsServer( + contexts: ChatContext[] | undefined, + userId: string, + userMessage?: string +): Promise { + if (!Array.isArray(contexts) || contexts.length === 0) return [] + const tasks = contexts.map(async (ctx) => { + try { + if (ctx.kind === 'past_chat' && ctx.chatId) { + return await processPastChatFromDb(ctx.chatId, userId, ctx.label ? `@${ctx.label}` : '@') + } + if ((ctx.kind === 'workflow' || ctx.kind === 'current_workflow') && ctx.workflowId) { + return await processWorkflowFromDb( + ctx.workflowId, + ctx.label ? `@${ctx.label}` : '@', + ctx.kind + ) + } + if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) { + return await processKnowledgeFromDb( + (ctx as any).knowledgeId, + ctx.label ? `@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'blocks' && (ctx as any).blockId) { + return await processBlockMetadata( + (ctx as any).blockId, + ctx.label ? `@${ctx.label}` : '@', + userId + ) + } + if (ctx.kind === 'templates' && (ctx as any).templateId) { + return await processTemplateFromDb( + (ctx as any).templateId, + ctx.label ? `@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'logs' && (ctx as any).executionId) { + return await processExecutionLogFromDb( + (ctx as any).executionId, + ctx.label ? `@${ctx.label}` : '@' + ) + } + if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) { + return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label) + } + if (ctx.kind === 'docs') { + try { + const { searchDocumentationServerTool } = await import( + '@/lib/copilot/tools/server/docs/search-documentation' + ) + const rawQuery = (userMessage || '').trim() || ctx.label || 'Sim documentation' + const query = sanitizeMessageForDocs(rawQuery, contexts) + const res = await searchDocumentationServerTool.execute({ query, topK: 10 }) + const content = JSON.stringify(res?.results || []) + return { type: 'docs', tag: ctx.label ? 
`@${ctx.label}` : '@', content } + } catch (e) { + logger.error('Failed to process docs context', e) + return null + } + } + return null + } catch (error) { + logger.error('Failed processing context (server)', { ctx, error }) + return null + } + }) + const results = await Promise.all(tasks) + const filtered = results.filter( + (r): r is AgentContext => !!r && typeof r.content === 'string' && r.content.trim().length > 0 + ) + logger.info('Processed contexts (server)', { + totalRequested: contexts.length, + totalProcessed: filtered.length, + kinds: Array.from(filtered.reduce((s, r) => s.add(r.type), new Set())), + }) + return filtered +} + +function sanitizeMessageForDocs(rawMessage: string, contexts: ChatContext[] | undefined): string { + if (!rawMessage) return '' + if (!Array.isArray(contexts) || contexts.length === 0) { + // No context mapping; conservatively strip all @mentions-like tokens + const stripped = rawMessage + .replace(/(^|\s)@([^\s]+)/g, ' ') + .replace(/\s{2,}/g, ' ') + .trim() + return stripped + } + + // Gather labels by kind + const blockLabels = new Set( + contexts + .filter((c) => c.kind === 'blocks') + .map((c) => c.label) + .filter((l): l is string => typeof l === 'string' && l.length > 0) + ) + const nonBlockLabels = new Set( + contexts + .filter((c) => c.kind !== 'blocks') + .map((c) => c.label) + .filter((l): l is string => typeof l === 'string' && l.length > 0) + ) + + let result = rawMessage + + // 1) Remove all non-block mentions entirely + for (const label of nonBlockLabels) { + const pattern = new RegExp(`(^|\\s)@${escapeRegExp(label)}(?!\\S)`, 'g') + result = result.replace(pattern, ' ') + } + + // 2) For block mentions, strip the '@' but keep the block name + for (const label of blockLabels) { + const pattern = new RegExp(`@${escapeRegExp(label)}(?!\\S)`, 'g') + result = result.replace(pattern, label) + } + + // 3) Remove any remaining @mentions (unknown or not in contexts) + result = result.replace(/(^|\s)@([^\s]+)/g, ' ') + + // Normalize whitespace + result = result.replace(/\s{2,}/g, ' ').trim() + return result +} + +async function processPastChatFromDb( + chatId: string, + userId: string, + tag: string +): Promise { + try { + const rows = await db + .select({ messages: copilotChats.messages }) + .from(copilotChats) + .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId))) + .limit(1) + const messages = Array.isArray(rows?.[0]?.messages) ? (rows[0] as any).messages : [] + const content = messages + .map((m: any) => { + const role = m.role || 'user' + let text = '' + if (Array.isArray(m.contentBlocks) && m.contentBlocks.length > 0) { + text = m.contentBlocks + .filter((b: any) => b?.type === 'text') + .map((b: any) => String(b.content || '')) + .join('') + .trim() + } + if (!text && typeof m.content === 'string') text = m.content + return `${role}: ${text}`.trim() + }) + .filter((s: string) => s.length > 0) + .join('\n') + logger.info('Processed past_chat context from DB', { + chatId, + length: content.length, + lines: content ? 
content.split('\n').length : 0, + }) + return { type: 'past_chat', tag, content } + } catch (error) { + logger.error('Error processing past chat from db', { chatId, error }) + return null + } +} + +async function processWorkflowFromDb( + workflowId: string, + tag: string, + kind: 'workflow' | 'current_workflow' = 'workflow' +): Promise { + try { + const normalized = await loadWorkflowFromNormalizedTables(workflowId) + if (!normalized) { + logger.warn('No normalized workflow data found', { workflowId }) + return null + } + const workflowState = { + blocks: normalized.blocks || {}, + edges: normalized.edges || [], + loops: normalized.loops || {}, + parallels: normalized.parallels || {}, + } + // Sanitize workflow state for copilot (remove UI-specific data like positions) + const sanitizedState = sanitizeForCopilot(workflowState) + // Match get-user-workflow format: just the workflow state JSON + const content = JSON.stringify(sanitizedState, null, 2) + logger.info('Processed sanitized workflow context', { + workflowId, + blocks: Object.keys(sanitizedState.blocks || {}).length, + }) + // Use the provided kind for the type + return { type: kind, tag, content } + } catch (error) { + logger.error('Error processing workflow context', { workflowId, error }) + return null + } +} + +async function processPastChat(chatId: string, tagOverride?: string): Promise { + try { + const resp = await fetch(`/api/copilot/chat/${encodeURIComponent(chatId)}`) + if (!resp.ok) { + logger.error('Failed to fetch past chat', { chatId, status: resp.status }) + return null + } + const data = await resp.json() + const messages = Array.isArray(data?.chat?.messages) ? data.chat.messages : [] + const content = messages + .map((m: any) => { + const role = m.role || 'user' + // Prefer contentBlocks text if present (joins text blocks), else use content + let text = '' + if (Array.isArray(m.contentBlocks) && m.contentBlocks.length > 0) { + text = m.contentBlocks + .filter((b: any) => b?.type === 'text') + .map((b: any) => String(b.content || '')) + .join('') + .trim() + } + if (!text && typeof m.content === 'string') text = m.content + return `${role}: ${text}`.trim() + }) + .filter((s: string) => s.length > 0) + .join('\n') + logger.info('Processed past_chat context via API', { chatId, length: content.length }) + + return { type: 'past_chat', tag: tagOverride || '@', content } + } catch (error) { + logger.error('Error processing past chat', { chatId, error }) + return null + } +} + +// Back-compat alias; used by processContexts above +async function processPastChatViaApi(chatId: string, tag?: string) { + return processPastChat(chatId, tag) +} + +async function processKnowledgeFromDb( + knowledgeBaseId: string, + tag: string +): Promise { + try { + // Load KB metadata + const kbRows = await db + .select({ + id: knowledgeBase.id, + name: knowledgeBase.name, + updatedAt: knowledgeBase.updatedAt, + }) + .from(knowledgeBase) + .where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt))) + .limit(1) + const kb = kbRows?.[0] + if (!kb) return null + + // Load up to 20 recent doc filenames + const docRows = await db + .select({ filename: document.filename }) + .from(document) + .where(and(eq(document.knowledgeBaseId, knowledgeBaseId), isNull(document.deletedAt))) + .limit(20) + + const sampleDocuments = docRows.map((d: any) => d.filename).filter(Boolean) + // We don't have total via this quick select; fallback to sample count + const summary = { + id: kb.id, + name: kb.name, + docCount: sampleDocuments.length, + 
sampleDocuments, + } + const content = JSON.stringify(summary) + return { type: 'knowledge', tag, content } + } catch (error) { + logger.error('Error processing knowledge context (db)', { knowledgeBaseId, error }) + return null + } +} + +async function processBlockMetadata( + blockId: string, + tag: string, + userId?: string +): Promise { + try { + if (userId) { + const permissionConfig = await getUserPermissionConfig(userId) + const allowedIntegrations = permissionConfig?.allowedIntegrations + if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) { + logger.debug('Block not allowed by permission group', { blockId, userId }) + return null + } + } + + // Reuse registry to match get_blocks_metadata tool result + const { registry: blockRegistry } = await import('@/blocks/registry') + const { tools: toolsRegistry } = await import('@/tools/registry') + const SPECIAL_BLOCKS_METADATA: Record = {} + + let metadata: any = {} + if ((SPECIAL_BLOCKS_METADATA as any)[blockId]) { + metadata = { ...(SPECIAL_BLOCKS_METADATA as any)[blockId] } + metadata.tools = metadata.tools?.access || [] + } else { + const blockConfig: any = (blockRegistry as any)[blockId] + if (!blockConfig) { + return null + } + metadata = { + id: blockId, + name: blockConfig.name || blockId, + description: blockConfig.description || '', + longDescription: blockConfig.longDescription, + category: blockConfig.category, + bgColor: blockConfig.bgColor, + inputs: blockConfig.inputs || {}, + outputs: blockConfig.outputs + ? Object.fromEntries( + Object.entries(blockConfig.outputs).filter(([_, def]) => !isHiddenFromDisplay(def)) + ) + : {}, + tools: blockConfig.tools?.access || [], + hideFromToolbar: blockConfig.hideFromToolbar, + } + if (blockConfig.subBlocks && Array.isArray(blockConfig.subBlocks)) { + metadata.subBlocks = (blockConfig.subBlocks as any[]).map((sb: any) => ({ + id: sb.id, + name: sb.name, + type: sb.type, + description: sb.description, + default: sb.default, + options: Array.isArray(sb.options) ? 
sb.options : [], + })) + } else { + metadata.subBlocks = [] + } + } + + if (Array.isArray(metadata.tools) && metadata.tools.length > 0) { + metadata.toolDetails = {} + for (const toolId of metadata.tools) { + const tool = (toolsRegistry as any)[toolId] + if (tool) { + metadata.toolDetails[toolId] = { name: tool.name, description: tool.description } + } + } + } + + const content = JSON.stringify({ metadata }) + return { type: 'blocks', tag, content } + } catch (error) { + logger.error('Error processing block metadata', { blockId, error }) + return null + } +} + +async function processTemplateFromDb( + templateId: string, + tag: string +): Promise { + try { + const rows = await db + .select({ + id: templates.id, + name: templates.name, + details: templates.details, + stars: templates.stars, + state: templates.state, + }) + .from(templates) + .where(eq(templates.id, templateId)) + .limit(1) + const t = rows?.[0] + if (!t) return null + const workflowState = t.state || {} + const summary = { + id: t.id, + name: t.name, + description: (t.details as any)?.tagline || '', + stars: t.stars || 0, + workflow: workflowState, + } + const content = JSON.stringify(summary) + return { type: 'templates', tag, content } + } catch (error) { + logger.error('Error processing template context (db)', { templateId, error }) + return null + } +} + +async function processWorkflowBlockFromDb( + workflowId: string, + blockId: string, + label?: string +): Promise { + try { + const normalized = await loadWorkflowFromNormalizedTables(workflowId) + if (!normalized) return null + const block = (normalized.blocks as any)[blockId] + if (!block) return null + const tag = label ? `@${label} in Workflow` : `@${block.name || blockId} in Workflow` + + // Build content: isolate the block and include its subBlocks fully + const contentObj = { + workflowId, + block: block, + } + const content = JSON.stringify(contentObj) + return { type: 'workflow_block', tag, content } + } catch (error) { + logger.error('Error processing workflow_block context', { workflowId, blockId, error }) + return null + } +} + +async function processExecutionLogFromDb( + executionId: string, + tag: string +): Promise { + try { + const { workflowExecutionLogs, workflow } = await import('@sim/db/schema') + const { db } = await import('@sim/db') + const rows = await db + .select({ + id: workflowExecutionLogs.id, + workflowId: workflowExecutionLogs.workflowId, + executionId: workflowExecutionLogs.executionId, + level: workflowExecutionLogs.level, + trigger: workflowExecutionLogs.trigger, + startedAt: workflowExecutionLogs.startedAt, + endedAt: workflowExecutionLogs.endedAt, + totalDurationMs: workflowExecutionLogs.totalDurationMs, + executionData: workflowExecutionLogs.executionData, + cost: workflowExecutionLogs.cost, + workflowName: workflow.name, + }) + .from(workflowExecutionLogs) + .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) + .where(eq(workflowExecutionLogs.executionId, executionId)) + .limit(1) + + const log = rows?.[0] as any + if (!log) return null + + const summary = { + id: log.id, + workflowId: log.workflowId, + executionId: log.executionId, + level: log.level, + trigger: log.trigger, + startedAt: log.startedAt?.toISOString?.() || String(log.startedAt), + endedAt: log.endedAt?.toISOString?.() || (log.endedAt ? String(log.endedAt) : null), + totalDurationMs: log.totalDurationMs ?? 
null, + workflowName: log.workflowName || '', + // Include trace spans and any available details without being huge + executionData: log.executionData + ? { + traceSpans: (log.executionData as any).traceSpans || undefined, + errorDetails: (log.executionData as any).errorDetails || undefined, + } + : undefined, + cost: log.cost || undefined, + } + + const content = JSON.stringify(summary) + return { type: 'logs', tag, content } + } catch (error) { + logger.error('Error processing execution log context (db)', { executionId, error }) + return null + } +} diff --git a/.tmp_old_shared_schemas.ts b/.tmp_old_shared_schemas.ts new file mode 100644 index 0000000000..2377aecf79 --- /dev/null +++ b/.tmp_old_shared_schemas.ts @@ -0,0 +1,178 @@ +import { z } from 'zod' + +// Generic envelope used by client to validate API responses +export const ExecuteResponseSuccessSchema = z.object({ + success: z.literal(true), + result: z.unknown(), +}) +export type ExecuteResponseSuccess = z.infer + +// get_blocks_and_tools +export const GetBlocksAndToolsInput = z.object({}) +export const GetBlocksAndToolsResult = z.object({ + blocks: z.array( + z + .object({ + type: z.string(), + name: z.string(), + triggerAllowed: z.boolean().optional(), + longDescription: z.string().optional(), + }) + .passthrough() + ), +}) +export type GetBlocksAndToolsResultType = z.infer + +// get_blocks_metadata +export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) }) +export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) }) +export type GetBlocksMetadataResultType = z.infer + +// get_trigger_blocks +export const GetTriggerBlocksInput = z.object({}) +export const GetTriggerBlocksResult = z.object({ + triggerBlockIds: z.array(z.string()), +}) +export type GetTriggerBlocksResultType = z.infer + +// get_block_options +export const GetBlockOptionsInput = z.object({ + blockId: z.string(), +}) +export const GetBlockOptionsResult = z.object({ + blockId: z.string(), + blockName: z.string(), + operations: z.array( + z.object({ + id: z.string(), + name: z.string(), + description: z.string().optional(), + }) + ), +}) +export type GetBlockOptionsInputType = z.infer +export type GetBlockOptionsResultType = z.infer + +// get_block_config +export const GetBlockConfigInput = z.object({ + blockType: z.string(), + operation: z.string().optional(), + trigger: z.boolean().optional(), +}) +export const GetBlockConfigResult = z.object({ + blockType: z.string(), + blockName: z.string(), + operation: z.string().optional(), + trigger: z.boolean().optional(), + inputs: z.record(z.any()), + outputs: z.record(z.any()), +}) +export type GetBlockConfigInputType = z.infer +export type GetBlockConfigResultType = z.infer + +// knowledge_base - shared schema used by client tool, server tool, and registry +export const KnowledgeBaseArgsSchema = z.object({ + operation: z.enum(['create', 'list', 'get', 'query']), + args: z + .object({ + /** Name of the knowledge base (required for create) */ + name: z.string().optional(), + /** Description of the knowledge base (optional for create) */ + description: z.string().optional(), + /** Workspace ID to associate with (required for create, optional for list) */ + workspaceId: z.string().optional(), + /** Knowledge base ID (required for get, query) */ + knowledgeBaseId: z.string().optional(), + /** Search query text (required for query) */ + query: z.string().optional(), + /** Number of results to return (optional for query, defaults to 5) */ + topK: 
z.number().min(1).max(50).optional(), + /** Chunking configuration (optional for create) */ + chunkingConfig: z + .object({ + maxSize: z.number().min(100).max(4000).default(1024), + minSize: z.number().min(1).max(2000).default(1), + overlap: z.number().min(0).max(500).default(200), + }) + .optional(), + }) + .optional(), +}) +export type KnowledgeBaseArgs = z.infer + +export const KnowledgeBaseResultSchema = z.object({ + success: z.boolean(), + message: z.string(), + data: z.any().optional(), +}) +export type KnowledgeBaseResult = z.infer + +export const GetBlockOutputsInput = z.object({ + blockIds: z.array(z.string()).optional(), +}) +export const GetBlockOutputsResult = z.object({ + blocks: z.array( + z.object({ + blockId: z.string(), + blockName: z.string(), + blockType: z.string(), + triggerMode: z.boolean().optional(), + outputs: z.array(z.string()), + insideSubflowOutputs: z.array(z.string()).optional(), + outsideSubflowOutputs: z.array(z.string()).optional(), + }) + ), + variables: z + .array( + z.object({ + id: z.string(), + name: z.string(), + type: z.string(), + tag: z.string(), + }) + ) + .optional(), +}) +export type GetBlockOutputsInputType = z.infer +export type GetBlockOutputsResultType = z.infer + +export const GetBlockUpstreamReferencesInput = z.object({ + blockIds: z.array(z.string()).min(1), +}) +export const GetBlockUpstreamReferencesResult = z.object({ + results: z.array( + z.object({ + blockId: z.string(), + blockName: z.string(), + insideSubflows: z + .array( + z.object({ + blockId: z.string(), + blockName: z.string(), + blockType: z.string(), + }) + ) + .optional(), + accessibleBlocks: z.array( + z.object({ + blockId: z.string(), + blockName: z.string(), + blockType: z.string(), + triggerMode: z.boolean().optional(), + outputs: z.array(z.string()), + accessContext: z.enum(['inside', 'outside']).optional(), + }) + ), + variables: z.array( + z.object({ + id: z.string(), + name: z.string(), + type: z.string(), + tag: z.string(), + }) + ), + }) + ), +}) +export type GetBlockUpstreamReferencesInputType = z.infer +export type GetBlockUpstreamReferencesResultType = z.infer diff --git a/.tmp_old_store.ts b/.tmp_old_store.ts new file mode 100644 index 0000000000..e368d412e1 --- /dev/null +++ b/.tmp_old_store.ts @@ -0,0 +1,4239 @@ +'use client' + +import { createLogger } from '@sim/logger' +import { create } from 'zustand' +import { devtools } from 'zustand/middleware' +import { type CopilotChat, sendStreamingMessage } from '@/lib/copilot/api' +import type { CopilotTransportMode } from '@/lib/copilot/models' +import type { + BaseClientToolMetadata, + ClientToolDisplay, +} from '@/lib/copilot/tools/client/base-tool' +import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool' +import { GetBlockConfigClientTool } from '@/lib/copilot/tools/client/blocks/get-block-config' +import { GetBlockOptionsClientTool } from '@/lib/copilot/tools/client/blocks/get-block-options' +import { GetBlocksAndToolsClientTool } from '@/lib/copilot/tools/client/blocks/get-blocks-and-tools' +import { GetBlocksMetadataClientTool } from '@/lib/copilot/tools/client/blocks/get-blocks-metadata' +import { GetTriggerBlocksClientTool } from '@/lib/copilot/tools/client/blocks/get-trigger-blocks' +import { GetExamplesRagClientTool } from '@/lib/copilot/tools/client/examples/get-examples-rag' +import { GetOperationsExamplesClientTool } from '@/lib/copilot/tools/client/examples/get-operations-examples' +import { GetTriggerExamplesClientTool } from 
'@/lib/copilot/tools/client/examples/get-trigger-examples' +import { SummarizeClientTool } from '@/lib/copilot/tools/client/examples/summarize' +import { KnowledgeBaseClientTool } from '@/lib/copilot/tools/client/knowledge/knowledge-base' +import { + getClientTool, + registerClientTool, + registerToolStateSync, +} from '@/lib/copilot/tools/client/manager' +import { NavigateUIClientTool } from '@/lib/copilot/tools/client/navigation/navigate-ui' +import { AuthClientTool } from '@/lib/copilot/tools/client/other/auth' +import { CheckoffTodoClientTool } from '@/lib/copilot/tools/client/other/checkoff-todo' +import { CrawlWebsiteClientTool } from '@/lib/copilot/tools/client/other/crawl-website' +import { CustomToolClientTool } from '@/lib/copilot/tools/client/other/custom-tool' +import { DebugClientTool } from '@/lib/copilot/tools/client/other/debug' +import { DeployClientTool } from '@/lib/copilot/tools/client/other/deploy' +import { EditClientTool } from '@/lib/copilot/tools/client/other/edit' +import { EvaluateClientTool } from '@/lib/copilot/tools/client/other/evaluate' +import { GetPageContentsClientTool } from '@/lib/copilot/tools/client/other/get-page-contents' +import { InfoClientTool } from '@/lib/copilot/tools/client/other/info' +import { KnowledgeClientTool } from '@/lib/copilot/tools/client/other/knowledge' +import { MakeApiRequestClientTool } from '@/lib/copilot/tools/client/other/make-api-request' +import { MarkTodoInProgressClientTool } from '@/lib/copilot/tools/client/other/mark-todo-in-progress' +import { OAuthRequestAccessClientTool } from '@/lib/copilot/tools/client/other/oauth-request-access' +import { PlanClientTool } from '@/lib/copilot/tools/client/other/plan' +import { RememberDebugClientTool } from '@/lib/copilot/tools/client/other/remember-debug' +import { ResearchClientTool } from '@/lib/copilot/tools/client/other/research' +import { ScrapePageClientTool } from '@/lib/copilot/tools/client/other/scrape-page' +import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/search-documentation' +import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors' +import { SearchLibraryDocsClientTool } from '@/lib/copilot/tools/client/other/search-library-docs' +import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online' +import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns' +import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep' +import { TestClientTool } from '@/lib/copilot/tools/client/other/test' +import { TourClientTool } from '@/lib/copilot/tools/client/other/tour' +import { WorkflowClientTool } from '@/lib/copilot/tools/client/other/workflow' +import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry' +import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials' +import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables' +import { CheckDeploymentStatusClientTool } from '@/lib/copilot/tools/client/workflow/check-deployment-status' +import { CreateWorkspaceMcpServerClientTool } from '@/lib/copilot/tools/client/workflow/create-workspace-mcp-server' +import { DeployApiClientTool } from '@/lib/copilot/tools/client/workflow/deploy-api' +import { DeployChatClientTool } from '@/lib/copilot/tools/client/workflow/deploy-chat' +import { DeployMcpClientTool } from '@/lib/copilot/tools/client/workflow/deploy-mcp' +import { EditWorkflowClientTool } from 
'@/lib/copilot/tools/client/workflow/edit-workflow' +import { GetBlockOutputsClientTool } from '@/lib/copilot/tools/client/workflow/get-block-outputs' +import { GetBlockUpstreamReferencesClientTool } from '@/lib/copilot/tools/client/workflow/get-block-upstream-references' +import { GetUserWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/get-user-workflow' +import { GetWorkflowConsoleClientTool } from '@/lib/copilot/tools/client/workflow/get-workflow-console' +import { GetWorkflowDataClientTool } from '@/lib/copilot/tools/client/workflow/get-workflow-data' +import { GetWorkflowFromNameClientTool } from '@/lib/copilot/tools/client/workflow/get-workflow-from-name' +import { ListUserWorkflowsClientTool } from '@/lib/copilot/tools/client/workflow/list-user-workflows' +import { ListWorkspaceMcpServersClientTool } from '@/lib/copilot/tools/client/workflow/list-workspace-mcp-servers' +import { ManageCustomToolClientTool } from '@/lib/copilot/tools/client/workflow/manage-custom-tool' +import { ManageMcpToolClientTool } from '@/lib/copilot/tools/client/workflow/manage-mcp-tool' +import { RedeployClientTool } from '@/lib/copilot/tools/client/workflow/redeploy' +import { RunWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/run-workflow' +import { SetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/set-global-workflow-variables' +import { getQueryClient } from '@/app/_shell/providers/query-provider' +import { subscriptionKeys } from '@/hooks/queries/subscription' +import type { + ChatContext, + CopilotMessage, + CopilotStore, + CopilotToolCall, + MessageFileAttachment, +} from '@/stores/panel/copilot/types' +import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' +import { useSubBlockStore } from '@/stores/workflows/subblock/store' +import { mergeSubblockState } from '@/stores/workflows/utils' +import { useWorkflowStore } from '@/stores/workflows/workflow/store' +import type { WorkflowState } from '@/stores/workflows/workflow/types' + +const logger = createLogger('CopilotStore') + +// On module load, clear any lingering diff preview (fresh page refresh) +try { + const diffStore = useWorkflowDiffStore.getState() + if (diffStore?.hasActiveDiff) { + diffStore.clearDiff() + } +} catch {} + +// Known class-based client tools: map tool name -> instantiator +const CLIENT_TOOL_INSTANTIATORS: Record any> = { + plan: (id) => new PlanClientTool(id), + edit: (id) => new EditClientTool(id), + debug: (id) => new DebugClientTool(id), + test: (id) => new TestClientTool(id), + deploy: (id) => new DeployClientTool(id), + evaluate: (id) => new EvaluateClientTool(id), + auth: (id) => new AuthClientTool(id), + research: (id) => new ResearchClientTool(id), + knowledge: (id) => new KnowledgeClientTool(id), + custom_tool: (id) => new CustomToolClientTool(id), + tour: (id) => new TourClientTool(id), + info: (id) => new InfoClientTool(id), + workflow: (id) => new WorkflowClientTool(id), + run_workflow: (id) => new RunWorkflowClientTool(id), + get_workflow_console: (id) => new GetWorkflowConsoleClientTool(id), + get_blocks_and_tools: (id) => new GetBlocksAndToolsClientTool(id), + get_blocks_metadata: (id) => new GetBlocksMetadataClientTool(id), + get_block_options: (id) => new GetBlockOptionsClientTool(id), + get_block_config: (id) => new GetBlockConfigClientTool(id), + get_trigger_blocks: (id) => new GetTriggerBlocksClientTool(id), + search_online: (id) => new SearchOnlineClientTool(id), + search_documentation: (id) => new SearchDocumentationClientTool(id), + 
search_library_docs: (id) => new SearchLibraryDocsClientTool(id), + search_patterns: (id) => new SearchPatternsClientTool(id), + search_errors: (id) => new SearchErrorsClientTool(id), + scrape_page: (id) => new ScrapePageClientTool(id), + get_page_contents: (id) => new GetPageContentsClientTool(id), + crawl_website: (id) => new CrawlWebsiteClientTool(id), + remember_debug: (id) => new RememberDebugClientTool(id), + set_environment_variables: (id) => new SetEnvironmentVariablesClientTool(id), + get_credentials: (id) => new GetCredentialsClientTool(id), + knowledge_base: (id) => new KnowledgeBaseClientTool(id), + make_api_request: (id) => new MakeApiRequestClientTool(id), + checkoff_todo: (id) => new CheckoffTodoClientTool(id), + mark_todo_in_progress: (id) => new MarkTodoInProgressClientTool(id), + oauth_request_access: (id) => new OAuthRequestAccessClientTool(id), + edit_workflow: (id) => new EditWorkflowClientTool(id), + get_user_workflow: (id) => new GetUserWorkflowClientTool(id), + list_user_workflows: (id) => new ListUserWorkflowsClientTool(id), + get_workflow_from_name: (id) => new GetWorkflowFromNameClientTool(id), + get_workflow_data: (id) => new GetWorkflowDataClientTool(id), + set_global_workflow_variables: (id) => new SetGlobalWorkflowVariablesClientTool(id), + get_trigger_examples: (id) => new GetTriggerExamplesClientTool(id), + get_examples_rag: (id) => new GetExamplesRagClientTool(id), + get_operations_examples: (id) => new GetOperationsExamplesClientTool(id), + summarize_conversation: (id) => new SummarizeClientTool(id), + deploy_api: (id) => new DeployApiClientTool(id), + deploy_chat: (id) => new DeployChatClientTool(id), + deploy_mcp: (id) => new DeployMcpClientTool(id), + redeploy: (id) => new RedeployClientTool(id), + list_workspace_mcp_servers: (id) => new ListWorkspaceMcpServersClientTool(id), + create_workspace_mcp_server: (id) => new CreateWorkspaceMcpServerClientTool(id), + check_deployment_status: (id) => new CheckDeploymentStatusClientTool(id), + navigate_ui: (id) => new NavigateUIClientTool(id), + manage_custom_tool: (id) => new ManageCustomToolClientTool(id), + manage_mcp_tool: (id) => new ManageMcpToolClientTool(id), + sleep: (id) => new SleepClientTool(id), + get_block_outputs: (id) => new GetBlockOutputsClientTool(id), + get_block_upstream_references: (id) => new GetBlockUpstreamReferencesClientTool(id), +} + +// Read-only static metadata for class-based tools (no instances) +export const CLASS_TOOL_METADATA: Record = { + plan: (PlanClientTool as any)?.metadata, + edit: (EditClientTool as any)?.metadata, + debug: (DebugClientTool as any)?.metadata, + test: (TestClientTool as any)?.metadata, + deploy: (DeployClientTool as any)?.metadata, + evaluate: (EvaluateClientTool as any)?.metadata, + auth: (AuthClientTool as any)?.metadata, + research: (ResearchClientTool as any)?.metadata, + knowledge: (KnowledgeClientTool as any)?.metadata, + custom_tool: (CustomToolClientTool as any)?.metadata, + tour: (TourClientTool as any)?.metadata, + info: (InfoClientTool as any)?.metadata, + workflow: (WorkflowClientTool as any)?.metadata, + run_workflow: (RunWorkflowClientTool as any)?.metadata, + get_workflow_console: (GetWorkflowConsoleClientTool as any)?.metadata, + get_blocks_and_tools: (GetBlocksAndToolsClientTool as any)?.metadata, + get_blocks_metadata: (GetBlocksMetadataClientTool as any)?.metadata, + get_block_options: (GetBlockOptionsClientTool as any)?.metadata, + get_block_config: (GetBlockConfigClientTool as any)?.metadata, + get_trigger_blocks: 
(GetTriggerBlocksClientTool as any)?.metadata, + search_online: (SearchOnlineClientTool as any)?.metadata, + search_documentation: (SearchDocumentationClientTool as any)?.metadata, + search_library_docs: (SearchLibraryDocsClientTool as any)?.metadata, + search_patterns: (SearchPatternsClientTool as any)?.metadata, + search_errors: (SearchErrorsClientTool as any)?.metadata, + scrape_page: (ScrapePageClientTool as any)?.metadata, + get_page_contents: (GetPageContentsClientTool as any)?.metadata, + crawl_website: (CrawlWebsiteClientTool as any)?.metadata, + remember_debug: (RememberDebugClientTool as any)?.metadata, + set_environment_variables: (SetEnvironmentVariablesClientTool as any)?.metadata, + get_credentials: (GetCredentialsClientTool as any)?.metadata, + knowledge_base: (KnowledgeBaseClientTool as any)?.metadata, + make_api_request: (MakeApiRequestClientTool as any)?.metadata, + checkoff_todo: (CheckoffTodoClientTool as any)?.metadata, + mark_todo_in_progress: (MarkTodoInProgressClientTool as any)?.metadata, + edit_workflow: (EditWorkflowClientTool as any)?.metadata, + get_user_workflow: (GetUserWorkflowClientTool as any)?.metadata, + list_user_workflows: (ListUserWorkflowsClientTool as any)?.metadata, + get_workflow_from_name: (GetWorkflowFromNameClientTool as any)?.metadata, + get_workflow_data: (GetWorkflowDataClientTool as any)?.metadata, + set_global_workflow_variables: (SetGlobalWorkflowVariablesClientTool as any)?.metadata, + get_trigger_examples: (GetTriggerExamplesClientTool as any)?.metadata, + get_examples_rag: (GetExamplesRagClientTool as any)?.metadata, + oauth_request_access: (OAuthRequestAccessClientTool as any)?.metadata, + get_operations_examples: (GetOperationsExamplesClientTool as any)?.metadata, + summarize_conversation: (SummarizeClientTool as any)?.metadata, + deploy_api: (DeployApiClientTool as any)?.metadata, + deploy_chat: (DeployChatClientTool as any)?.metadata, + deploy_mcp: (DeployMcpClientTool as any)?.metadata, + redeploy: (RedeployClientTool as any)?.metadata, + list_workspace_mcp_servers: (ListWorkspaceMcpServersClientTool as any)?.metadata, + create_workspace_mcp_server: (CreateWorkspaceMcpServerClientTool as any)?.metadata, + check_deployment_status: (CheckDeploymentStatusClientTool as any)?.metadata, + navigate_ui: (NavigateUIClientTool as any)?.metadata, + manage_custom_tool: (ManageCustomToolClientTool as any)?.metadata, + manage_mcp_tool: (ManageMcpToolClientTool as any)?.metadata, + sleep: (SleepClientTool as any)?.metadata, + get_block_outputs: (GetBlockOutputsClientTool as any)?.metadata, + get_block_upstream_references: (GetBlockUpstreamReferencesClientTool as any)?.metadata, +} + +function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) { + try { + if (!toolName || !toolCallId) return + if (getClientTool(toolCallId)) return + const make = CLIENT_TOOL_INSTANTIATORS[toolName] + if (make) { + const inst = make(toolCallId) + registerClientTool(toolCallId, inst) + } + } catch {} +} + +// Constants +const TEXT_BLOCK_TYPE = 'text' +const THINKING_BLOCK_TYPE = 'thinking' +const DATA_PREFIX = 'data: ' +const DATA_PREFIX_LENGTH = 6 +const CONTINUE_OPTIONS_TAG = '{"1":"Continue"}' + +// Resolve display text/icon for a tool based on its state +function resolveToolDisplay( + toolName: string | undefined, + state: ClientToolCallState, + toolCallId?: string, + params?: Record +): ClientToolDisplay | undefined { + try { + if (!toolName) return undefined + const def = getTool(toolName) as any + const toolMetadata = 
def?.metadata || CLASS_TOOL_METADATA[toolName] + const meta = toolMetadata?.displayNames || {} + + // Exact state first + const ds = meta?.[state] + if (ds?.text || ds?.icon) { + // Check if tool has a dynamic text formatter + const getDynamicText = toolMetadata?.getDynamicText + if (getDynamicText && params) { + try { + const dynamicText = getDynamicText(params, state) + if (dynamicText) { + return { text: dynamicText, icon: ds.icon } + } + } catch (e) { + // Fall back to static text if formatter fails + } + } + return { text: ds.text, icon: ds.icon } + } + + // Fallback order (prefer pre-execution states for unknown states like pending) + const fallbackOrder: ClientToolCallState[] = [ + (ClientToolCallState as any).generating, + (ClientToolCallState as any).executing, + (ClientToolCallState as any).review, + (ClientToolCallState as any).success, + (ClientToolCallState as any).error, + (ClientToolCallState as any).rejected, + ] + for (const key of fallbackOrder) { + const cand = meta?.[key] + if (cand?.text || cand?.icon) return { text: cand.text, icon: cand.icon } + } + } catch {} + // Humanized fallback as last resort - include state verb for proper verb-noun styling + try { + if (toolName) { + const formattedName = toolName.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase()) + // Add state verb prefix for verb-noun rendering in tool-call component + let stateVerb: string + switch (state) { + case ClientToolCallState.pending: + case ClientToolCallState.executing: + stateVerb = 'Executing' + break + case ClientToolCallState.success: + stateVerb = 'Executed' + break + case ClientToolCallState.error: + stateVerb = 'Failed' + break + case ClientToolCallState.rejected: + case ClientToolCallState.aborted: + stateVerb = 'Skipped' + break + default: + stateVerb = 'Executing' + } + return { text: `${stateVerb} ${formattedName}`, icon: undefined as any } + } + } catch {} + return undefined +} + +// Helper: check if a tool state is rejected +function isRejectedState(state: any): boolean { + try { + return state === 'rejected' || state === (ClientToolCallState as any).rejected + } catch { + return state === 'rejected' + } +} + +// Helper: check if a tool state is review (terminal for build/edit preview) +function isReviewState(state: any): boolean { + try { + return state === 'review' || state === (ClientToolCallState as any).review + } catch { + return state === 'review' + } +} + +// Helper: check if a tool state is background (terminal) +function isBackgroundState(state: any): boolean { + try { + return state === 'background' || state === (ClientToolCallState as any).background + } catch { + return state === 'background' + } +} + +/** + * Checks if a tool call state is terminal (success, error, rejected, aborted, review, or background) + */ +function isTerminalState(state: any): boolean { + return ( + state === ClientToolCallState.success || + state === ClientToolCallState.error || + state === ClientToolCallState.rejected || + state === ClientToolCallState.aborted || + isReviewState(state) || + isBackgroundState(state) + ) +} + +// Helper: abort all in-progress client tools and update inline blocks +function abortAllInProgressTools(set: any, get: () => CopilotStore) { + try { + const { toolCallsById, messages } = get() + const updatedMap = { ...toolCallsById } + const abortedIds = new Set() + let hasUpdates = false + for (const [id, tc] of Object.entries(toolCallsById)) { + const st = tc.state as any + // Abort anything not already terminal success/error/rejected/aborted + const 
isTerminal = + st === ClientToolCallState.success || + st === ClientToolCallState.error || + st === ClientToolCallState.rejected || + st === ClientToolCallState.aborted + if (!isTerminal || isReviewState(st)) { + abortedIds.add(id) + updatedMap[id] = { + ...tc, + state: ClientToolCallState.aborted, + subAgentStreaming: false, + display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, (tc as any).params), + } + hasUpdates = true + } else if (tc.subAgentStreaming) { + updatedMap[id] = { + ...tc, + subAgentStreaming: false, + } + hasUpdates = true + } + } + if (abortedIds.size > 0 || hasUpdates) { + set({ toolCallsById: updatedMap }) + // Update inline blocks in-place for the latest assistant message only (most relevant) + set((s: CopilotStore) => { + const msgs = [...s.messages] + for (let mi = msgs.length - 1; mi >= 0; mi--) { + const m = msgs[mi] as any + if (m.role !== 'assistant' || !Array.isArray(m.contentBlocks)) continue + let changed = false + const blocks = m.contentBlocks.map((b: any) => { + if (b?.type === 'tool_call' && b.toolCall?.id && abortedIds.has(b.toolCall.id)) { + changed = true + const prev = b.toolCall + return { + ...b, + toolCall: { + ...prev, + state: ClientToolCallState.aborted, + display: resolveToolDisplay( + prev?.name, + ClientToolCallState.aborted, + prev?.id, + prev?.params + ), + }, + } + } + return b + }) + if (changed) { + msgs[mi] = { ...m, contentBlocks: blocks } + break + } + } + return { messages: msgs } + }) + } + } catch {} +} + +// Normalize loaded messages so assistant messages render correctly from DB +/** + * Loads messages from DB for UI rendering. + * Messages are stored exactly as they render, so we just need to: + * 1. Register client tool instances for any tool calls + * 2. Clear any streaming flags (messages loaded from DB are never actively streaming) + * 3. Return the messages + */ +function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] { + try { + // Log what we're loading + for (const message of messages) { + if (message.role === 'assistant') { + logger.info('[normalizeMessagesForUI] Loading assistant message', { + id: message.id, + hasContent: !!message.content?.trim(), + contentBlockCount: message.contentBlocks?.length || 0, + contentBlockTypes: (message.contentBlocks as any[])?.map((b) => b?.type) || [], + }) + } + } + + // Register client tool instances and clear streaming flags for all tool calls + for (const message of messages) { + if (message.contentBlocks) { + for (const block of message.contentBlocks as any[]) { + if (block?.type === 'tool_call' && block.toolCall) { + registerToolCallInstances(block.toolCall) + clearStreamingFlags(block.toolCall) + } + } + } + // Also clear from toolCalls array (legacy format) + if (message.toolCalls) { + for (const toolCall of message.toolCalls) { + clearStreamingFlags(toolCall) + } + } + } + return messages + } catch { + return messages + } +} + +/** + * Recursively clears streaming flags from a tool call and its nested subagent tool calls. + * This ensures messages loaded from DB don't appear to be streaming. 
+ */ +function clearStreamingFlags(toolCall: any): void { + if (!toolCall) return + + // Always set subAgentStreaming to false - messages loaded from DB are never streaming + toolCall.subAgentStreaming = false + + // Clear nested subagent tool calls + if (Array.isArray(toolCall.subAgentBlocks)) { + for (const block of toolCall.subAgentBlocks) { + if (block?.type === 'subagent_tool_call' && block.toolCall) { + clearStreamingFlags(block.toolCall) + } + } + } + if (Array.isArray(toolCall.subAgentToolCalls)) { + for (const subTc of toolCall.subAgentToolCalls) { + clearStreamingFlags(subTc) + } + } +} + +/** + * Recursively registers client tool instances for a tool call and its nested subagent tool calls. + */ +function registerToolCallInstances(toolCall: any): void { + if (!toolCall?.id) return + ensureClientToolInstance(toolCall.name, toolCall.id) + + // Register nested subagent tool calls + if (Array.isArray(toolCall.subAgentBlocks)) { + for (const block of toolCall.subAgentBlocks) { + if (block?.type === 'subagent_tool_call' && block.toolCall) { + registerToolCallInstances(block.toolCall) + } + } + } + if (Array.isArray(toolCall.subAgentToolCalls)) { + for (const subTc of toolCall.subAgentToolCalls) { + registerToolCallInstances(subTc) + } + } +} + +// Simple object pool for content blocks +class ObjectPool { + private pool: T[] = [] + private createFn: () => T + private resetFn: (obj: T) => void + + constructor(createFn: () => T, resetFn: (obj: T) => void, initialSize = 5) { + this.createFn = createFn + this.resetFn = resetFn + for (let i = 0; i < initialSize; i++) this.pool.push(createFn()) + } + get(): T { + const obj = this.pool.pop() + if (obj) { + this.resetFn(obj) + return obj + } + return this.createFn() + } + release(obj: T): void { + if (this.pool.length < 20) this.pool.push(obj) + } +} + +const contentBlockPool = new ObjectPool( + () => ({ type: '', content: '', timestamp: 0, toolCall: null as any }), + (obj) => { + obj.type = '' + obj.content = '' + obj.timestamp = 0 + ;(obj as any).toolCall = null + ;(obj as any).startTime = undefined + ;(obj as any).duration = undefined + } +) + +// Efficient string builder +class StringBuilder { + private parts: string[] = [] + private length = 0 + append(str: string): void { + this.parts.push(str) + this.length += str.length + } + toString(): string { + const result = this.parts.join('') + this.clear() + return result + } + clear(): void { + this.parts.length = 0 + this.length = 0 + } + get size(): number { + return this.length + } +} + +// Helpers +function createUserMessage( + content: string, + fileAttachments?: MessageFileAttachment[], + contexts?: ChatContext[], + messageId?: string +): CopilotMessage { + return { + id: messageId || crypto.randomUUID(), + role: 'user', + content, + timestamp: new Date().toISOString(), + ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }), + ...(contexts && contexts.length > 0 && { contexts }), + ...(contexts && + contexts.length > 0 && { + contentBlocks: [ + { type: 'contexts', contexts: contexts as any, timestamp: Date.now() }, + ] as any, + }), + } +} + +function createStreamingMessage(): CopilotMessage { + return { + id: crypto.randomUUID(), + role: 'assistant', + content: '', + timestamp: new Date().toISOString(), + } +} + +function createErrorMessage( + messageId: string, + content: string, + errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required' +): CopilotMessage { + return { + id: messageId, + role: 'assistant', + content, + timestamp: 
new Date().toISOString(), + contentBlocks: [ + { + type: 'text', + content, + timestamp: Date.now(), + }, + ], + errorType, + } +} + +/** + * Builds a workflow snapshot suitable for checkpoint persistence. + */ +function buildCheckpointWorkflowState(workflowId: string): WorkflowState | null { + const rawState = useWorkflowStore.getState().getWorkflowState() + if (!rawState) return null + + const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, workflowId) + + const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce( + (acc, [blockId, block]) => { + if (block?.type && block?.name) { + acc[blockId] = { + ...block, + id: block.id || blockId, + enabled: block.enabled !== undefined ? block.enabled : true, + horizontalHandles: block.horizontalHandles !== undefined ? block.horizontalHandles : true, + height: block.height !== undefined ? block.height : 90, + subBlocks: block.subBlocks || {}, + outputs: block.outputs || {}, + data: block.data || {}, + position: block.position || { x: 0, y: 0 }, + } + } + return acc + }, + {} as WorkflowState['blocks'] + ) + + return { + blocks: filteredBlocks, + edges: rawState.edges || [], + loops: rawState.loops || {}, + parallels: rawState.parallels || {}, + lastSaved: rawState.lastSaved || Date.now(), + deploymentStatuses: rawState.deploymentStatuses || {}, + } +} + +/** + * Persists a previously captured snapshot as a workflow checkpoint. + */ +async function saveMessageCheckpoint( + messageId: string, + get: () => CopilotStore, + set: (partial: Partial | ((state: CopilotStore) => Partial)) => void +): Promise { + const { workflowId, currentChat, messageSnapshots, messageCheckpoints } = get() + if (!workflowId || !currentChat?.id) return false + + const snapshot = messageSnapshots[messageId] + if (!snapshot) return false + + const nextSnapshots = { ...messageSnapshots } + delete nextSnapshots[messageId] + set({ messageSnapshots: nextSnapshots }) + + try { + const response = await fetch('/api/copilot/checkpoints', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + workflowId, + chatId: currentChat.id, + messageId, + workflowState: JSON.stringify(snapshot), + }), + }) + + if (!response.ok) { + throw new Error(`Failed to create checkpoint: ${response.statusText}`) + } + + const result = await response.json() + const newCheckpoint = result.checkpoint + if (newCheckpoint) { + const existingCheckpoints = messageCheckpoints[messageId] || [] + const updatedCheckpoints = { + ...messageCheckpoints, + [messageId]: [newCheckpoint, ...existingCheckpoints], + } + set({ messageCheckpoints: updatedCheckpoints }) + } + + return true + } catch (error) { + logger.error('Failed to create checkpoint from snapshot:', error) + return false + } +} + +function stripTodoTags(text: string): string { + if (!text) return text + return text + .replace(/[\s\S]*?<\/marktodo>/g, '') + .replace(/[\s\S]*?<\/checkofftodo>/g, '') + .replace(/[\s\S]*?<\/design_workflow>/g, '') + .replace(/[ \t]+\n/g, '\n') + .replace(/\n{2,}/g, '\n') +} + +/** + * Deep clones an object using JSON serialization. + * This ensures we strip any non-serializable data (functions, circular refs). + */ +function deepClone(obj: T): T { + try { + const json = JSON.stringify(obj) + if (!json || json === 'undefined') { + logger.warn('[deepClone] JSON.stringify returned empty for object', { + type: typeof obj, + isArray: Array.isArray(obj), + length: Array.isArray(obj) ? 
obj.length : undefined, + }) + return obj + } + const parsed = JSON.parse(json) + // Verify the clone worked + if (Array.isArray(obj) && (!Array.isArray(parsed) || parsed.length !== obj.length)) { + logger.warn('[deepClone] Array clone mismatch', { + originalLength: obj.length, + clonedLength: Array.isArray(parsed) ? parsed.length : 'not array', + }) + } + return parsed + } catch (err) { + logger.error('[deepClone] Failed to clone object', { + error: String(err), + type: typeof obj, + isArray: Array.isArray(obj), + }) + return obj + } +} + +/** + * Recursively masks credential IDs in any value (string, object, or array). + * Used during serialization to ensure sensitive IDs are never persisted. + */ +function maskCredentialIdsInValue(value: any, credentialIds: Set): any { + if (!value || credentialIds.size === 0) return value + + if (typeof value === 'string') { + let masked = value + // Sort by length descending to mask longer IDs first + const sortedIds = Array.from(credentialIds).sort((a, b) => b.length - a.length) + for (const id of sortedIds) { + if (id && masked.includes(id)) { + masked = masked.split(id).join('••••••••') + } + } + return masked + } + + if (Array.isArray(value)) { + return value.map((item) => maskCredentialIdsInValue(item, credentialIds)) + } + + if (typeof value === 'object') { + const masked: any = {} + for (const key of Object.keys(value)) { + masked[key] = maskCredentialIdsInValue(value[key], credentialIds) + } + return masked + } + + return value +} + +/** + * Serializes messages for database storage. + * Deep clones all fields to ensure proper JSON serialization. + * Masks sensitive credential IDs before persisting. + * This ensures they render identically when loaded back. + */ +function serializeMessagesForDB(messages: CopilotMessage[]): any[] { + // Get credential IDs to mask + const credentialIds = useCopilotStore.getState().sensitiveCredentialIds + + const result = messages + .map((msg) => { + // Deep clone the entire message to ensure all nested data is serializable + // Ensure timestamp is always a string (Zod schema requires it) + let timestamp: string = msg.timestamp + if (typeof timestamp !== 'string') { + const ts = timestamp as any + timestamp = ts instanceof Date ? 
ts.toISOString() : new Date().toISOString() + } + + const serialized: any = { + id: msg.id, + role: msg.role, + content: msg.content || '', + timestamp, + } + + // Deep clone contentBlocks (the main rendering data) + if (Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0) { + serialized.contentBlocks = deepClone(msg.contentBlocks) + } + + // Deep clone toolCalls + if (Array.isArray((msg as any).toolCalls) && (msg as any).toolCalls.length > 0) { + serialized.toolCalls = deepClone((msg as any).toolCalls) + } + + // Deep clone file attachments + if (Array.isArray(msg.fileAttachments) && msg.fileAttachments.length > 0) { + serialized.fileAttachments = deepClone(msg.fileAttachments) + } + + // Deep clone contexts + if (Array.isArray((msg as any).contexts) && (msg as any).contexts.length > 0) { + serialized.contexts = deepClone((msg as any).contexts) + } + + // Deep clone citations + if (Array.isArray(msg.citations) && msg.citations.length > 0) { + serialized.citations = deepClone(msg.citations) + } + + // Copy error type + if (msg.errorType) { + serialized.errorType = msg.errorType + } + + // Mask credential IDs in the serialized message before persisting + return maskCredentialIdsInValue(serialized, credentialIds) + }) + .filter((msg) => { + // Filter out empty assistant messages + if (msg.role === 'assistant') { + const hasContent = typeof msg.content === 'string' && msg.content.trim().length > 0 + const hasTools = Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0 + const hasBlocks = Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0 + return hasContent || hasTools || hasBlocks + } + return true + }) + + // Log what we're serializing + for (const msg of messages) { + if (msg.role === 'assistant') { + logger.info('[serializeMessagesForDB] Input assistant message', { + id: msg.id, + hasContent: !!msg.content?.trim(), + contentBlockCount: msg.contentBlocks?.length || 0, + contentBlockTypes: (msg.contentBlocks as any[])?.map((b) => b?.type) || [], + }) + } + } + + logger.info('[serializeMessagesForDB] Serialized messages', { + inputCount: messages.length, + outputCount: result.length, + sample: + result.length > 0 + ? { + role: result[result.length - 1].role, + hasContent: !!result[result.length - 1].content, + contentBlockCount: result[result.length - 1].contentBlocks?.length || 0, + toolCallCount: result[result.length - 1].toolCalls?.length || 0, + } + : null, + }) + + return result +} + +/** + * @deprecated Use serializeMessagesForDB instead. + */ +function validateMessagesForLLM(messages: CopilotMessage[]): any[] { + return serializeMessagesForDB(messages) +} + +/** + * Extracts all tool calls from a toolCall object, including nested subAgentBlocks. + * Adds them to the provided map. + */ +function extractToolCallsRecursively( + toolCall: CopilotToolCall, + map: Record +): void { + if (!toolCall?.id) return + map[toolCall.id] = toolCall + + // Extract nested tool calls from subAgentBlocks + if (Array.isArray(toolCall.subAgentBlocks)) { + for (const block of toolCall.subAgentBlocks) { + if (block?.type === 'subagent_tool_call' && block.toolCall?.id) { + extractToolCallsRecursively(block.toolCall, map) + } + } + } + + // Extract from subAgentToolCalls as well + if (Array.isArray(toolCall.subAgentToolCalls)) { + for (const subTc of toolCall.subAgentToolCalls) { + extractToolCallsRecursively(subTc, map) + } + } +} + +/** + * Builds a complete toolCallsById map from normalized messages. + * Extracts all tool calls including nested subagent tool calls. 
+ */ +function buildToolCallsById(messages: CopilotMessage[]): Record { + const toolCallsById: Record = {} + for (const msg of messages) { + if (msg.contentBlocks) { + for (const block of msg.contentBlocks as any[]) { + if (block?.type === 'tool_call' && block.toolCall?.id) { + extractToolCallsRecursively(block.toolCall, toolCallsById) + } + } + } + } + return toolCallsById +} + +// Streaming context and SSE parsing +interface StreamingContext { + messageId: string + accumulatedContent: StringBuilder + contentBlocks: any[] + currentTextBlock: any | null + isInThinkingBlock: boolean + currentThinkingBlock: any | null + isInDesignWorkflowBlock: boolean + designWorkflowContent: string + pendingContent: string + newChatId?: string + doneEventCount: number + streamComplete?: boolean + wasAborted?: boolean + suppressContinueOption?: boolean + /** Track active subagent sessions by parent tool call ID */ + subAgentParentToolCallId?: string + /** Track subagent content per parent tool call */ + subAgentContent: Record + /** Track subagent tool calls per parent tool call */ + subAgentToolCalls: Record + /** Track subagent streaming blocks per parent tool call */ + subAgentBlocks: Record +} + +type SSEHandler = ( + data: any, + context: StreamingContext, + get: () => CopilotStore, + set: any +) => Promise | void + +function appendTextBlock(context: StreamingContext, text: string) { + if (!text) return + context.accumulatedContent.append(text) + if (context.currentTextBlock && context.contentBlocks.length > 0) { + const lastBlock = context.contentBlocks[context.contentBlocks.length - 1] + if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) { + lastBlock.content += text + return + } + } + context.currentTextBlock = contentBlockPool.get() + context.currentTextBlock.type = TEXT_BLOCK_TYPE + context.currentTextBlock.content = text + context.currentTextBlock.timestamp = Date.now() + context.contentBlocks.push(context.currentTextBlock) +} + +function appendContinueOption(content: string): string { + if (//i.test(content)) return content + const suffix = content.trim().length > 0 ? '\n\n' : '' + return `${content}${suffix}${CONTINUE_OPTIONS_TAG}` +} + +function appendContinueOptionBlock(blocks: any[]): any[] { + if (!Array.isArray(blocks)) return blocks + const hasOptions = blocks.some( + (block) => + block?.type === TEXT_BLOCK_TYPE && + typeof block.content === 'string' && + //i.test(block.content) + ) + if (hasOptions) return blocks + return [ + ...blocks, + { + type: TEXT_BLOCK_TYPE, + content: CONTINUE_OPTIONS_TAG, + timestamp: Date.now(), + }, + ] +} + +function beginThinkingBlock(context: StreamingContext) { + if (!context.currentThinkingBlock) { + context.currentThinkingBlock = contentBlockPool.get() + context.currentThinkingBlock.type = THINKING_BLOCK_TYPE + context.currentThinkingBlock.content = '' + context.currentThinkingBlock.timestamp = Date.now() + ;(context.currentThinkingBlock as any).startTime = Date.now() + context.contentBlocks.push(context.currentThinkingBlock) + } + context.isInThinkingBlock = true + context.currentTextBlock = null +} + +/** + * Removes thinking tags (raw or escaped) from streamed content. 
+ */ +function stripThinkingTags(text: string): string { + return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '') +} + +function appendThinkingContent(context: StreamingContext, text: string) { + if (!text) return + const cleanedText = stripThinkingTags(text) + if (!cleanedText) return + if (context.currentThinkingBlock) { + context.currentThinkingBlock.content += cleanedText + } else { + context.currentThinkingBlock = contentBlockPool.get() + context.currentThinkingBlock.type = THINKING_BLOCK_TYPE + context.currentThinkingBlock.content = cleanedText + context.currentThinkingBlock.timestamp = Date.now() + context.currentThinkingBlock.startTime = Date.now() + context.contentBlocks.push(context.currentThinkingBlock) + } + context.isInThinkingBlock = true + context.currentTextBlock = null +} + +function finalizeThinkingBlock(context: StreamingContext) { + if (context.currentThinkingBlock) { + context.currentThinkingBlock.duration = + Date.now() - (context.currentThinkingBlock.startTime || Date.now()) + } + context.isInThinkingBlock = false + context.currentThinkingBlock = null + context.currentTextBlock = null +} + +function upsertToolCallBlock(context: StreamingContext, toolCall: CopilotToolCall) { + let found = false + for (let i = 0; i < context.contentBlocks.length; i++) { + const b = context.contentBlocks[i] as any + if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) { + context.contentBlocks[i] = { ...b, toolCall } + found = true + break + } + } + if (!found) { + context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() }) + } +} + +function appendSubAgentText(context: StreamingContext, parentToolCallId: string, text: string) { + if (!context.subAgentContent[parentToolCallId]) { + context.subAgentContent[parentToolCallId] = '' + } + if (!context.subAgentBlocks[parentToolCallId]) { + context.subAgentBlocks[parentToolCallId] = [] + } + context.subAgentContent[parentToolCallId] += text + const blocks = context.subAgentBlocks[parentToolCallId] + const lastBlock = blocks[blocks.length - 1] + if (lastBlock && lastBlock.type === 'subagent_text') { + lastBlock.content = (lastBlock.content || '') + text + } else { + blocks.push({ + type: 'subagent_text', + content: text, + timestamp: Date.now(), + }) + } +} + +const sseHandlers: Record = { + chat_id: async (data, context, get) => { + context.newChatId = data.chatId + const { currentChat } = get() + if (!currentChat && context.newChatId) { + await get().handleNewChatCreation(context.newChatId) + } + }, + title_updated: (_data, _context, get, set) => { + const title = _data.title + if (!title) return + const { currentChat, chats } = get() + if (currentChat) { + set({ + currentChat: { ...currentChat, title }, + chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)), + }) + } + }, + tool_result: (data, context, get, set) => { + try { + const toolCallId: string | undefined = data?.toolCallId || data?.data?.id + const success: boolean | undefined = data?.success + const failedDependency: boolean = data?.failedDependency === true + const skipped: boolean = data?.result?.skipped === true + if (!toolCallId) return + const { toolCallsById } = get() + const current = toolCallsById[toolCallId] + if (current) { + if ( + isRejectedState(current.state) || + isReviewState(current.state) || + isBackgroundState(current.state) + ) { + // Preserve terminal review/rejected state; do not override + return + } + const targetState = success + ? 
ClientToolCallState.success + : failedDependency || skipped + ? ClientToolCallState.rejected + : ClientToolCallState.error + const updatedMap = { ...toolCallsById } + updatedMap[toolCallId] = { + ...current, + state: targetState, + display: resolveToolDisplay( + current.name, + targetState, + current.id, + (current as any).params + ), + } + set({ toolCallsById: updatedMap }) + + // If checkoff_todo succeeded, mark todo as completed in planTodos + if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') { + try { + const result = data?.result || data?.data?.result || {} + const input = (current as any).params || (current as any).input || {} + const todoId = input.id || input.todoId || result.id || result.todoId + if (todoId) { + get().updatePlanTodoStatus(todoId, 'completed') + } + } catch {} + } + + // If mark_todo_in_progress succeeded, set todo executing in planTodos + if ( + targetState === ClientToolCallState.success && + current.name === 'mark_todo_in_progress' + ) { + try { + const result = data?.result || data?.data?.result || {} + const input = (current as any).params || (current as any).input || {} + const todoId = input.id || input.todoId || result.id || result.todoId + if (todoId) { + get().updatePlanTodoStatus(todoId, 'executing') + } + } catch {} + } + } + + // Update inline content block state + for (let i = 0; i < context.contentBlocks.length; i++) { + const b = context.contentBlocks[i] as any + if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { + if ( + isRejectedState(b.toolCall?.state) || + isReviewState(b.toolCall?.state) || + isBackgroundState(b.toolCall?.state) + ) + break + const targetState = success + ? ClientToolCallState.success + : failedDependency || skipped + ? ClientToolCallState.rejected + : ClientToolCallState.error + context.contentBlocks[i] = { + ...b, + toolCall: { + ...b.toolCall, + state: targetState, + display: resolveToolDisplay( + b.toolCall?.name, + targetState, + toolCallId, + b.toolCall?.params + ), + }, + } + break + } + } + updateStreamingMessage(set, context) + } catch {} + }, + tool_error: (data, context, get, set) => { + try { + const toolCallId: string | undefined = data?.toolCallId || data?.data?.id + const failedDependency: boolean = data?.failedDependency === true + if (!toolCallId) return + const { toolCallsById } = get() + const current = toolCallsById[toolCallId] + if (current) { + if ( + isRejectedState(current.state) || + isReviewState(current.state) || + isBackgroundState(current.state) + ) { + return + } + const targetState = failedDependency + ? ClientToolCallState.rejected + : ClientToolCallState.error + const updatedMap = { ...toolCallsById } + updatedMap[toolCallId] = { + ...current, + state: targetState, + display: resolveToolDisplay( + current.name, + targetState, + current.id, + (current as any).params + ), + } + set({ toolCallsById: updatedMap }) + } + for (let i = 0; i < context.contentBlocks.length; i++) { + const b = context.contentBlocks[i] as any + if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { + if ( + isRejectedState(b.toolCall?.state) || + isReviewState(b.toolCall?.state) || + isBackgroundState(b.toolCall?.state) + ) + break + const targetState = failedDependency + ? 
ClientToolCallState.rejected + : ClientToolCallState.error + context.contentBlocks[i] = { + ...b, + toolCall: { + ...b.toolCall, + state: targetState, + display: resolveToolDisplay( + b.toolCall?.name, + targetState, + toolCallId, + b.toolCall?.params + ), + }, + } + break + } + } + updateStreamingMessage(set, context) + } catch {} + }, + tool_generating: (data, context, get, set) => { + const { toolCallId, toolName } = data + if (!toolCallId || !toolName) return + const { toolCallsById } = get() + + // Ensure class-based client tool instances are registered (for interrupts/display) + ensureClientToolInstance(toolName, toolCallId) + + if (!toolCallsById[toolCallId]) { + // Show as pending until we receive full tool_call (with arguments) to decide execution + const initialState = ClientToolCallState.pending + const tc: CopilotToolCall = { + id: toolCallId, + name: toolName, + state: initialState, + display: resolveToolDisplay(toolName, initialState, toolCallId), + } + const updated = { ...toolCallsById, [toolCallId]: tc } + set({ toolCallsById: updated }) + logger.info('[toolCallsById] map updated', updated) + + // Add/refresh inline content block + upsertToolCallBlock(context, tc) + updateStreamingMessage(set, context) + } + }, + tool_call: (data, context, get, set) => { + const toolData = data?.data || {} + const id: string | undefined = toolData.id || data?.toolCallId + const name: string | undefined = toolData.name || data?.toolName + if (!id) return + const args = toolData.arguments + const isPartial = toolData.partial === true + const { toolCallsById } = get() + + // Ensure class-based client tool instances are registered (for interrupts/display) + ensureClientToolInstance(name, id) + + const existing = toolCallsById[id] + const next: CopilotToolCall = existing + ? { + ...existing, + state: ClientToolCallState.pending, + ...(args ? { params: args } : {}), + display: resolveToolDisplay(name, ClientToolCallState.pending, id, args), + } + : { + id, + name: name || 'unknown_tool', + state: ClientToolCallState.pending, + ...(args ? { params: args } : {}), + display: resolveToolDisplay(name, ClientToolCallState.pending, id, args), + } + const updated = { ...toolCallsById, [id]: next } + set({ toolCallsById: updated }) + logger.info('[toolCallsById] → pending', { id, name, params: args }) + + // Ensure an inline content block exists/updated for this tool call + upsertToolCallBlock(context, next) + updateStreamingMessage(set, context) + + // Do not execute on partial tool_call frames + if (isPartial) { + return + } + + // Prefer interface-based registry to determine interrupt and execute + try { + const def = name ? getTool(name) : undefined + if (def) { + const hasInterrupt = + typeof def.hasInterrupt === 'function' + ? !!def.hasInterrupt(args || {}) + : !!def.hasInterrupt + // Check if tool is auto-allowed - if so, execute even if it has an interrupt + const { autoAllowedTools } = get() + const isAutoAllowed = name ? 
autoAllowedTools.includes(name) : false + if ((!hasInterrupt || isAutoAllowed) && typeof def.execute === 'function') { + if (isAutoAllowed && hasInterrupt) { + logger.info('[toolCallsById] Auto-executing tool with interrupt (auto-allowed)', { + id, + name, + }) + } + const ctx = createExecutionContext({ toolCallId: id, toolName: name || 'unknown_tool' }) + // Defer executing transition by a tick to let pending render + setTimeout(() => { + // Guard against duplicate execution - check if already executing or terminal + const currentState = get().toolCallsById[id]?.state + if (currentState === ClientToolCallState.executing || isTerminalState(currentState)) { + return + } + + const executingMap = { ...get().toolCallsById } + executingMap[id] = { + ...executingMap[id], + state: ClientToolCallState.executing, + display: resolveToolDisplay(name, ClientToolCallState.executing, id, args), + } + set({ toolCallsById: executingMap }) + logger.info('[toolCallsById] pending → executing (registry)', { id, name }) + + // Update inline content block to executing + for (let i = 0; i < context.contentBlocks.length; i++) { + const b = context.contentBlocks[i] as any + if (b.type === 'tool_call' && b.toolCall?.id === id) { + context.contentBlocks[i] = { + ...b, + toolCall: { ...b.toolCall, state: ClientToolCallState.executing }, + } + break + } + } + updateStreamingMessage(set, context) + + Promise.resolve() + .then(async () => { + const result = await def.execute(ctx, args || {}) + const success = + result && typeof result.status === 'number' + ? result.status >= 200 && result.status < 300 + : true + const completeMap = { ...get().toolCallsById } + // Do not override terminal review/rejected + if ( + isRejectedState(completeMap[id]?.state) || + isReviewState(completeMap[id]?.state) || + isBackgroundState(completeMap[id]?.state) + ) { + return + } + completeMap[id] = { + ...completeMap[id], + state: success ? ClientToolCallState.success : ClientToolCallState.error, + display: resolveToolDisplay( + name, + success ? ClientToolCallState.success : ClientToolCallState.error, + id, + args + ), + } + set({ toolCallsById: completeMap }) + logger.info( + `[toolCallsById] executing → ${success ? 'success' : 'error'} (registry)`, + { id, name } + ) + + // Notify backend tool mark-complete endpoint + try { + await fetch('/api/copilot/tools/mark-complete', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + id, + name: name || 'unknown_tool', + status: + typeof result?.status === 'number' ? result.status : success ? 
200 : 500, + message: result?.message, + data: result?.data, + }), + }) + } catch {} + }) + .catch((e) => { + const errorMap = { ...get().toolCallsById } + // Do not override terminal review/rejected + if ( + isRejectedState(errorMap[id]?.state) || + isReviewState(errorMap[id]?.state) || + isBackgroundState(errorMap[id]?.state) + ) { + return + } + errorMap[id] = { + ...errorMap[id], + state: ClientToolCallState.error, + display: resolveToolDisplay(name, ClientToolCallState.error, id, args), + } + set({ toolCallsById: errorMap }) + logger.error('Registry auto-execute tool failed', { id, name, error: e }) + }) + }, 0) + return + } + } + } catch (e) { + logger.warn('tool_call registry auto-exec check failed', { id, name, error: e }) + } + + // Class-based auto-exec for non-interrupt tools or auto-allowed tools + try { + const inst = getClientTool(id) as any + const hasInterrupt = !!inst?.getInterruptDisplays?.() + // Check if tool is auto-allowed - if so, execute even if it has an interrupt + const { autoAllowedTools: classAutoAllowed } = get() + const isClassAutoAllowed = name ? classAutoAllowed.includes(name) : false + if ( + (!hasInterrupt || isClassAutoAllowed) && + (typeof inst?.execute === 'function' || typeof inst?.handleAccept === 'function') + ) { + if (isClassAutoAllowed && hasInterrupt) { + logger.info('[toolCallsById] Auto-executing class tool with interrupt (auto-allowed)', { + id, + name, + }) + } + setTimeout(() => { + // Guard against duplicate execution - check if already executing or terminal + const currentState = get().toolCallsById[id]?.state + if (currentState === ClientToolCallState.executing || isTerminalState(currentState)) { + return + } + + const executingMap = { ...get().toolCallsById } + executingMap[id] = { + ...executingMap[id], + state: ClientToolCallState.executing, + display: resolveToolDisplay(name, ClientToolCallState.executing, id, args), + } + set({ toolCallsById: executingMap }) + logger.info('[toolCallsById] pending → executing (class)', { id, name }) + + Promise.resolve() + .then(async () => { + // Use handleAccept for tools with interrupts, execute for others + if (hasInterrupt && typeof inst?.handleAccept === 'function') { + await inst.handleAccept(args || {}) + } else { + await inst.execute(args || {}) + } + // Success/error will be synced via registerToolStateSync + }) + .catch(() => { + const errorMap = { ...get().toolCallsById } + // Do not override terminal review/rejected + if ( + isRejectedState(errorMap[id]?.state) || + isReviewState(errorMap[id]?.state) || + isBackgroundState(errorMap[id]?.state) + ) { + return + } + errorMap[id] = { + ...errorMap[id], + state: ClientToolCallState.error, + display: resolveToolDisplay(name, ClientToolCallState.error, id, args), + } + set({ toolCallsById: errorMap }) + }) + }, 0) + return + } + } catch {} + + // Integration tools: Check auto-allowed or stay in pending state until user confirms + // This handles tools like google_calendar_*, exa_*, gmail_read, etc. that aren't in the client registry + // Only relevant if mode is 'build' (agent) + const { mode, workflowId, autoAllowedTools, executeIntegrationTool } = get() + if (mode === 'build' && workflowId) { + // Check if tool was NOT found in client registry + const def = name ? 
getTool(name) : undefined + const inst = getClientTool(id) as any + if (!def && !inst && name) { + // Check if this integration tool is auto-allowed - if so, execute it immediately + if (autoAllowedTools.includes(name)) { + logger.info('[build mode] Auto-executing integration tool (auto-allowed)', { id, name }) + // Defer to allow pending state to render briefly + setTimeout(() => { + executeIntegrationTool(id).catch((err) => { + logger.error('[build mode] Auto-execute integration tool failed', { + id, + name, + error: err, + }) + }) + }, 0) + } else { + // Integration tools stay in pending state until user confirms + logger.info('[build mode] Integration tool awaiting user confirmation', { + id, + name, + }) + } + } + } + }, + reasoning: (data, context, _get, set) => { + const phase = (data && (data.phase || data?.data?.phase)) as string | undefined + if (phase === 'start') { + beginThinkingBlock(context) + updateStreamingMessage(set, context) + return + } + if (phase === 'end') { + finalizeThinkingBlock(context) + updateStreamingMessage(set, context) + return + } + const chunk: string = typeof data?.data === 'string' ? data.data : data?.content || '' + if (!chunk) return + appendThinkingContent(context, chunk) + updateStreamingMessage(set, context) + }, + content: (data, context, get, set) => { + if (!data.data) return + context.pendingContent += data.data + + let contentToProcess = context.pendingContent + let hasProcessedContent = false + + const thinkingStartRegex = // + const thinkingEndRegex = /<\/thinking>/ + const designWorkflowStartRegex = // + const designWorkflowEndRegex = /<\/design_workflow>/ + + const splitTrailingPartialTag = ( + text: string, + tags: string[] + ): { text: string; remaining: string } => { + const partialIndex = text.lastIndexOf('<') + if (partialIndex < 0) { + return { text, remaining: '' } + } + const possibleTag = text.substring(partialIndex) + const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag)) + if (!matchesTagStart) { + return { text, remaining: '' } + } + return { + text: text.substring(0, partialIndex), + remaining: possibleTag, + } + } + + while (contentToProcess.length > 0) { + // Handle design_workflow tags (takes priority over other content processing) + if (context.isInDesignWorkflowBlock) { + const endMatch = designWorkflowEndRegex.exec(contentToProcess) + if (endMatch) { + const designContent = contentToProcess.substring(0, endMatch.index) + context.designWorkflowContent += designContent + context.isInDesignWorkflowBlock = false + + // Update store with complete design workflow content (available in all modes) + logger.info('[design_workflow] Tag complete, setting plan content', { + contentLength: context.designWorkflowContent.length, + }) + set({ streamingPlanContent: context.designWorkflowContent }) + + contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length) + hasProcessedContent = true + } else { + // Still in design_workflow block, accumulate content + const { text, remaining } = splitTrailingPartialTag(contentToProcess, [ + '', + ]) + context.designWorkflowContent += text + + // Update store with partial content for streaming effect (available in all modes) + set({ streamingPlanContent: context.designWorkflowContent }) + + contentToProcess = remaining + hasProcessedContent = true + if (remaining) { + break + } + } + continue + } + + if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) { + // Check for design_workflow start tag first + const designStartMatch = 
designWorkflowStartRegex.exec(contentToProcess) + if (designStartMatch) { + const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index) + if (textBeforeDesign) { + appendTextBlock(context, textBeforeDesign) + hasProcessedContent = true + } + context.isInDesignWorkflowBlock = true + context.designWorkflowContent = '' + contentToProcess = contentToProcess.substring( + designStartMatch.index + designStartMatch[0].length + ) + hasProcessedContent = true + continue + } + + const nextMarkIndex = contentToProcess.indexOf('') + const nextCheckIndex = contentToProcess.indexOf('') + const hasMark = nextMarkIndex >= 0 + const hasCheck = nextCheckIndex >= 0 + + const nextTagIndex = + hasMark && hasCheck + ? Math.min(nextMarkIndex, nextCheckIndex) + : hasMark + ? nextMarkIndex + : hasCheck + ? nextCheckIndex + : -1 + + if (nextTagIndex >= 0) { + const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex + const tagStart = isMarkTodo ? '' : '' + const tagEnd = isMarkTodo ? '' : '' + const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length) + + if (closingIndex === -1) { + // Partial tag; wait for additional content + break + } + + const todoId = contentToProcess + .substring(nextTagIndex + tagStart.length, closingIndex) + .trim() + logger.info( + isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag', + { todoId } + ) + + if (todoId) { + try { + get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed') + logger.info( + isMarkTodo + ? '[TODO] Successfully marked todo in progress' + : '[TODO] Successfully checked off todo', + { todoId } + ) + } catch (e) { + logger.error( + isMarkTodo + ? '[TODO] Failed to mark todo in progress' + : '[TODO] Failed to checkoff todo', + { todoId, error: e } + ) + } + } else { + logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart }) + } + + // Remove the tag AND newlines around it, but preserve ONE newline if both sides had them + let beforeTag = contentToProcess.substring(0, nextTagIndex) + let afterTag = contentToProcess.substring(closingIndex + tagEnd.length) + + const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag) + const hadNewlineAfter = /^(\r?\n)+/.test(afterTag) + + // Strip trailing newlines before the tag + beforeTag = beforeTag.replace(/(\r?\n)+$/, '') + // Strip leading newlines after the tag + afterTag = afterTag.replace(/^(\r?\n)+/, '') + + // If there were newlines on both sides, add back ONE to preserve paragraph breaks + contentToProcess = + beforeTag + (hadNewlineBefore && hadNewlineAfter ? 
'\n' : '') + afterTag + context.currentTextBlock = null + hasProcessedContent = true + continue + } + } + + if (context.isInThinkingBlock) { + const endMatch = thinkingEndRegex.exec(contentToProcess) + if (endMatch) { + const thinkingContent = contentToProcess.substring(0, endMatch.index) + appendThinkingContent(context, thinkingContent) + finalizeThinkingBlock(context) + contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length) + hasProcessedContent = true + } else { + const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['']) + if (text) { + appendThinkingContent(context, text) + hasProcessedContent = true + } + contentToProcess = remaining + if (remaining) { + break + } + } + } else { + const startMatch = thinkingStartRegex.exec(contentToProcess) + if (startMatch) { + const textBeforeThinking = contentToProcess.substring(0, startMatch.index) + if (textBeforeThinking) { + appendTextBlock(context, textBeforeThinking) + hasProcessedContent = true + } + context.isInThinkingBlock = true + context.currentTextBlock = null + contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length) + hasProcessedContent = true + } else { + // Check if content might contain partial todo tags and hold them back + let partialTagIndex = contentToProcess.lastIndexOf('<') + + // Also check for partial marktodo or checkofftodo tags + const partialMarkTodo = contentToProcess.lastIndexOf(' partialTagIndex) { + partialTagIndex = partialMarkTodo + } + if (partialCheckoffTodo > partialTagIndex) { + partialTagIndex = partialCheckoffTodo + } + + let textToAdd = contentToProcess + let remaining = '' + if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) { + textToAdd = contentToProcess.substring(0, partialTagIndex) + remaining = contentToProcess.substring(partialTagIndex) + } + if (textToAdd) { + appendTextBlock(context, textToAdd) + hasProcessedContent = true + } + contentToProcess = remaining + break + } + } + } + + context.pendingContent = contentToProcess + if (hasProcessedContent) { + updateStreamingMessage(set, context) + } + }, + done: (_data, context) => { + logger.info('[SSE] DONE EVENT RECEIVED', { + doneEventCount: context.doneEventCount, + data: _data, + }) + context.doneEventCount++ + if (context.doneEventCount >= 1) { + logger.info('[SSE] Setting streamComplete = true, stream will terminate') + context.streamComplete = true + } + }, + error: (data, context, _get, set) => { + logger.error('Stream error:', data.error) + set((state: CopilotStore) => ({ + messages: state.messages.map((msg) => + msg.id === context.messageId + ? 
{ + ...msg, + content: context.accumulatedContent || 'An error occurred.', + error: data.error, + } + : msg + ), + })) + context.streamComplete = true + }, + stream_end: (_data, context, _get, set) => { + if (context.pendingContent) { + if (context.isInThinkingBlock && context.currentThinkingBlock) { + appendThinkingContent(context, context.pendingContent) + } else if (context.pendingContent.trim()) { + appendTextBlock(context, context.pendingContent) + } + context.pendingContent = '' + } + finalizeThinkingBlock(context) + updateStreamingMessage(set, context) + }, + default: () => {}, +} + +/** + * Helper to update a tool call with subagent data in both toolCallsById and contentBlocks + */ +function updateToolCallWithSubAgentData( + context: StreamingContext, + get: () => CopilotStore, + set: any, + parentToolCallId: string +) { + const { toolCallsById } = get() + const parentToolCall = toolCallsById[parentToolCallId] + if (!parentToolCall) { + logger.warn('[SubAgent] updateToolCallWithSubAgentData: parent tool call not found', { + parentToolCallId, + availableToolCallIds: Object.keys(toolCallsById), + }) + return + } + + // Prepare subagent blocks array for ordered display + const blocks = context.subAgentBlocks[parentToolCallId] || [] + + const updatedToolCall: CopilotToolCall = { + ...parentToolCall, + subAgentContent: context.subAgentContent[parentToolCallId] || '', + subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] || [], + subAgentBlocks: blocks, + subAgentStreaming: true, + } + + logger.info('[SubAgent] Updating tool call with subagent data', { + parentToolCallId, + parentToolName: parentToolCall.name, + subAgentContentLength: updatedToolCall.subAgentContent?.length, + subAgentBlocksCount: updatedToolCall.subAgentBlocks?.length, + subAgentToolCallsCount: updatedToolCall.subAgentToolCalls?.length, + }) + + // Update in toolCallsById + const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall } + set({ toolCallsById: updatedMap }) + + // Update in contentBlocks + let foundInContentBlocks = false + for (let i = 0; i < context.contentBlocks.length; i++) { + const b = context.contentBlocks[i] as any + if (b.type === 'tool_call' && b.toolCall?.id === parentToolCallId) { + context.contentBlocks[i] = { ...b, toolCall: updatedToolCall } + foundInContentBlocks = true + break + } + } + + if (!foundInContentBlocks) { + logger.warn('[SubAgent] Parent tool call not found in contentBlocks', { + parentToolCallId, + contentBlocksCount: context.contentBlocks.length, + toolCallBlockIds: context.contentBlocks + .filter((b: any) => b.type === 'tool_call') + .map((b: any) => b.toolCall?.id), + }) + } + + updateStreamingMessage(set, context) +} + +/** + * SSE handlers for subagent events (events with subagent field set) + * These handle content and tool calls from subagents like debug + */ +const subAgentSSEHandlers: Record = { + // Handle subagent response start (ignore - just a marker) + start: () => { + // Subagent start event - no action needed, parent is already tracked from subagent_start + }, + + // Handle subagent text content (reasoning/thinking) + content: (data, context, get, set) => { + const parentToolCallId = context.subAgentParentToolCallId + logger.info('[SubAgent] content event', { + parentToolCallId, + hasData: !!data.data, + dataPreview: typeof data.data === 'string' ? 
data.data.substring(0, 50) : null, + }) + if (!parentToolCallId || !data.data) { + logger.warn('[SubAgent] content missing parentToolCallId or data', { + parentToolCallId, + hasData: !!data.data, + }) + return + } + + appendSubAgentText(context, parentToolCallId, data.data) + + updateToolCallWithSubAgentData(context, get, set, parentToolCallId) + }, + + // Handle subagent reasoning (same as content for subagent display purposes) + reasoning: (data, context, get, set) => { + const parentToolCallId = context.subAgentParentToolCallId + const phase = data?.phase || data?.data?.phase + if (!parentToolCallId) return + + // For reasoning, we just append the content (treating start/end as markers) + if (phase === 'start' || phase === 'end') return + + const chunk = typeof data?.data === 'string' ? data.data : data?.content || '' + if (!chunk) return + + appendSubAgentText(context, parentToolCallId, chunk) + + updateToolCallWithSubAgentData(context, get, set, parentToolCallId) + }, + + // Handle subagent tool_generating (tool is being generated) + tool_generating: () => { + // Tool generating event - no action needed, we'll handle the actual tool_call + }, + + // Handle subagent tool calls - also execute client tools + tool_call: async (data, context, get, set) => { + const parentToolCallId = context.subAgentParentToolCallId + if (!parentToolCallId) return + + const toolData = data?.data || {} + const id: string | undefined = toolData.id || data?.toolCallId + const name: string | undefined = toolData.name || data?.toolName + if (!id || !name) return + const isPartial = toolData.partial === true + + // Arguments can come in different locations depending on SSE format + // Check multiple possible locations + let args = toolData.arguments || toolData.input || data?.arguments || data?.input + + // If arguments is a string, try to parse it as JSON + if (typeof args === 'string') { + try { + args = JSON.parse(args) + } catch { + logger.warn('[SubAgent] Failed to parse arguments string', { args }) + } + } + + logger.info('[SubAgent] tool_call received', { + id, + name, + hasArgs: !!args, + argsKeys: args ? Object.keys(args) : [], + toolDataKeys: Object.keys(toolData), + dataKeys: Object.keys(data || {}), + }) + + // Initialize if needed + if (!context.subAgentToolCalls[parentToolCallId]) { + context.subAgentToolCalls[parentToolCallId] = [] + } + if (!context.subAgentBlocks[parentToolCallId]) { + context.subAgentBlocks[parentToolCallId] = [] + } + + // Ensure client tool instance is registered (for execution) + ensureClientToolInstance(name, id) + + // Create or update the subagent tool call + const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex( + (tc) => tc.id === id + ) + const subAgentToolCall: CopilotToolCall = { + id, + name, + state: ClientToolCallState.pending, + ...(args ? 
{ params: args } : {}), + display: resolveToolDisplay(name, ClientToolCallState.pending, id, args), + } + + if (existingIndex >= 0) { + context.subAgentToolCalls[parentToolCallId][existingIndex] = subAgentToolCall + } else { + context.subAgentToolCalls[parentToolCallId].push(subAgentToolCall) + + // Also add to ordered blocks + context.subAgentBlocks[parentToolCallId].push({ + type: 'subagent_tool_call', + toolCall: subAgentToolCall, + timestamp: Date.now(), + }) + } + + // Also add to main toolCallsById for proper tool execution + const { toolCallsById } = get() + const updated = { ...toolCallsById, [id]: subAgentToolCall } + set({ toolCallsById: updated }) + + updateToolCallWithSubAgentData(context, get, set, parentToolCallId) + + if (isPartial) { + return + } + + // Execute client tools in parallel (non-blocking) - same pattern as main tool_call handler + // Check if tool is auto-allowed + const { autoAllowedTools: subAgentAutoAllowed } = get() + const isSubAgentAutoAllowed = name ? subAgentAutoAllowed.includes(name) : false + + try { + const def = getTool(name) + if (def) { + const hasInterrupt = + typeof def.hasInterrupt === 'function' + ? !!def.hasInterrupt(args || {}) + : !!def.hasInterrupt + // Auto-execute if no interrupt OR if auto-allowed + if (!hasInterrupt || isSubAgentAutoAllowed) { + if (isSubAgentAutoAllowed && hasInterrupt) { + logger.info('[SubAgent] Auto-executing tool with interrupt (auto-allowed)', { + id, + name, + }) + } + // Auto-execute tools - non-blocking + const ctx = createExecutionContext({ toolCallId: id, toolName: name }) + Promise.resolve() + .then(() => def.execute(ctx, args || {})) + .catch((execErr: any) => { + logger.error('[SubAgent] Tool execution failed', { + id, + name, + error: execErr?.message, + }) + }) + } + } else { + // Fallback to class-based tools - non-blocking + const instance = getClientTool(id) + if (instance) { + const hasInterruptDisplays = !!instance.getInterruptDisplays?.() + // Auto-execute if no interrupt OR if auto-allowed + if (!hasInterruptDisplays || isSubAgentAutoAllowed) { + if (isSubAgentAutoAllowed && hasInterruptDisplays) { + logger.info('[SubAgent] Auto-executing class tool with interrupt (auto-allowed)', { + id, + name, + }) + } + Promise.resolve() + .then(() => { + // Use handleAccept for tools with interrupts, execute for others + if (hasInterruptDisplays && typeof instance.handleAccept === 'function') { + return instance.handleAccept(args || {}) + } + return instance.execute(args || {}) + }) + .catch((execErr: any) => { + logger.error('[SubAgent] Class tool execution failed', { + id, + name, + error: execErr?.message, + }) + }) + } + } else { + // Check if this is an integration tool (server-side) that should be auto-executed + const isIntegrationTool = !CLASS_TOOL_METADATA[name] + if (isIntegrationTool && isSubAgentAutoAllowed) { + logger.info('[SubAgent] Auto-executing integration tool (auto-allowed)', { + id, + name, + }) + // Execute integration tool via the store method + const { executeIntegrationTool } = get() + executeIntegrationTool(id).catch((err) => { + logger.error('[SubAgent] Integration tool auto-execution failed', { + id, + name, + error: err?.message || err, + }) + }) + } + } + } + } catch (e: any) { + logger.error('[SubAgent] Tool registry/execution error', { id, name, error: e?.message }) + } + }, + + // Handle subagent tool results + tool_result: (data, context, get, set) => { + const parentToolCallId = context.subAgentParentToolCallId + if (!parentToolCallId) return + + const toolCallId: string | 
undefined = data?.toolCallId || data?.data?.id + const success: boolean | undefined = data?.success !== false // Default to true if not specified + if (!toolCallId) return + + // Initialize if needed + if (!context.subAgentToolCalls[parentToolCallId]) return + if (!context.subAgentBlocks[parentToolCallId]) return + + // Update the subagent tool call state + const targetState = success ? ClientToolCallState.success : ClientToolCallState.error + const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex( + (tc) => tc.id === toolCallId + ) + + if (existingIndex >= 0) { + const existing = context.subAgentToolCalls[parentToolCallId][existingIndex] + const updatedSubAgentToolCall = { + ...existing, + state: targetState, + display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params), + } + context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall + + // Also update in ordered blocks + for (const block of context.subAgentBlocks[parentToolCallId]) { + if (block.type === 'subagent_tool_call' && block.toolCall?.id === toolCallId) { + block.toolCall = updatedSubAgentToolCall + break + } + } + + // Update the individual tool call in toolCallsById so ToolCall component gets latest state + const { toolCallsById } = get() + if (toolCallsById[toolCallId]) { + const updatedMap = { + ...toolCallsById, + [toolCallId]: updatedSubAgentToolCall, + } + set({ toolCallsById: updatedMap }) + logger.info('[SubAgent] Updated subagent tool call state in toolCallsById', { + toolCallId, + name: existing.name, + state: targetState, + }) + } + } + + updateToolCallWithSubAgentData(context, get, set, parentToolCallId) + }, + + // Handle subagent stream done - just update the streaming state + done: (data, context, get, set) => { + const parentToolCallId = context.subAgentParentToolCallId + if (!parentToolCallId) return + + // Update the tool call with final content but keep streaming true until subagent_end + updateToolCallWithSubAgentData(context, get, set, parentToolCallId) + }, +} + +// Debounced UI update queue for smoother streaming +const streamingUpdateQueue = new Map() +let streamingUpdateRAF: number | null = null +let lastBatchTime = 0 +const MIN_BATCH_INTERVAL = 16 +const MAX_BATCH_INTERVAL = 50 +const MAX_QUEUE_SIZE = 5 + +function stopStreamingUpdates() { + if (streamingUpdateRAF !== null) { + cancelAnimationFrame(streamingUpdateRAF) + streamingUpdateRAF = null + } + streamingUpdateQueue.clear() +} + +function createOptimizedContentBlocks(contentBlocks: any[]): any[] { + const result: any[] = new Array(contentBlocks.length) + for (let i = 0; i < contentBlocks.length; i++) { + const block = contentBlocks[i] + result[i] = { ...block } + } + return result +} + +function updateStreamingMessage(set: any, context: StreamingContext) { + const now = performance.now() + streamingUpdateQueue.set(context.messageId, context) + const timeSinceLastBatch = now - lastBatchTime + const shouldFlushImmediately = + streamingUpdateQueue.size >= MAX_QUEUE_SIZE || timeSinceLastBatch > MAX_BATCH_INTERVAL + + if (streamingUpdateRAF === null) { + const scheduleUpdate = () => { + streamingUpdateRAF = requestAnimationFrame(() => { + const updates = new Map(streamingUpdateQueue) + streamingUpdateQueue.clear() + streamingUpdateRAF = null + lastBatchTime = performance.now() + set((state: CopilotStore) => { + if (updates.size === 0) return state + const messages = state.messages + const lastMessage = messages[messages.length - 1] + const lastMessageUpdate = lastMessage ? 
updates.get(lastMessage.id) : null + if (updates.size === 1 && lastMessageUpdate) { + const newMessages = [...messages] + newMessages[messages.length - 1] = { + ...lastMessage, + content: '', + contentBlocks: + lastMessageUpdate.contentBlocks.length > 0 + ? createOptimizedContentBlocks(lastMessageUpdate.contentBlocks) + : [], + } + return { messages: newMessages } + } + return { + messages: messages.map((msg) => { + const update = updates.get(msg.id) + if (update) { + return { + ...msg, + content: '', + contentBlocks: + update.contentBlocks.length > 0 + ? createOptimizedContentBlocks(update.contentBlocks) + : [], + } + } + return msg + }), + } + }) + }) + } + if (shouldFlushImmediately) scheduleUpdate() + else setTimeout(scheduleUpdate, Math.max(0, MIN_BATCH_INTERVAL - timeSinceLastBatch)) + } +} + +async function* parseSSEStream( + reader: ReadableStreamDefaultReader, + decoder: TextDecoder +) { + let buffer = '' + while (true) { + const { done, value } = await reader.read() + if (done) break + const chunk = decoder.decode(value, { stream: true }) + buffer += chunk + const lastNewlineIndex = buffer.lastIndexOf('\n') + if (lastNewlineIndex !== -1) { + const linesToProcess = buffer.substring(0, lastNewlineIndex) + buffer = buffer.substring(lastNewlineIndex + 1) + const lines = linesToProcess.split('\n') + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + if (line.length === 0) continue + if (line.charCodeAt(0) === 100 && line.startsWith(DATA_PREFIX)) { + try { + const jsonStr = line.substring(DATA_PREFIX_LENGTH) + yield JSON.parse(jsonStr) + } catch (error) { + logger.warn('Failed to parse SSE data:', error) + } + } + } + } + } +} + +// Initial state (subset required for UI/streaming) +const initialState = { + mode: 'build' as const, + selectedModel: 'claude-4.5-opus' as CopilotStore['selectedModel'], + agentPrefetch: false, + enabledModels: null as string[] | null, // Null means not loaded yet, empty array means all disabled + isCollapsed: false, + currentChat: null as CopilotChat | null, + chats: [] as CopilotChat[], + messages: [] as CopilotMessage[], + checkpoints: [] as any[], + messageCheckpoints: {} as Record, + messageSnapshots: {} as Record, + isLoading: false, + isLoadingChats: false, + isLoadingCheckpoints: false, + isSendingMessage: false, + isSaving: false, + isRevertingCheckpoint: false, + isAborting: false, + error: null as string | null, + saveError: null as string | null, + checkpointError: null as string | null, + workflowId: null as string | null, + abortController: null as AbortController | null, + chatsLastLoadedAt: null as Date | null, + chatsLoadedForWorkflow: null as string | null, + revertState: null as { messageId: string; messageContent: string } | null, + inputValue: '', + planTodos: [] as Array<{ id: string; content: string; completed?: boolean; executing?: boolean }>, + showPlanTodos: false, + streamingPlanContent: '', + toolCallsById: {} as Record, + suppressAutoSelect: false, + autoAllowedTools: [] as string[], + messageQueue: [] as import('./types').QueuedMessage[], + suppressAbortContinueOption: false, + sensitiveCredentialIds: new Set(), +} + +export const useCopilotStore = create()( + devtools((set, get) => ({ + ...initialState, + + // Basic mode controls + setMode: (mode) => set({ mode }), + + // Clear messages (don't clear streamingPlanContent - let it persist) + clearMessages: () => set({ messages: [] }), + + // Workflow selection + setWorkflowId: async (workflowId: string | null) => { + const currentWorkflowId = get().workflowId + if 
(currentWorkflowId === workflowId) return + const { isSendingMessage } = get() + if (isSendingMessage) get().abortMessage() + + // Abort all in-progress tools and clear any diff preview + abortAllInProgressTools(set, get) + try { + useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) + } catch {} + + set({ + ...initialState, + workflowId, + mode: get().mode, + selectedModel: get().selectedModel, + agentPrefetch: get().agentPrefetch, + }) + }, + + // Chats (minimal implementation for visibility) + validateCurrentChat: () => { + const { currentChat, workflowId, chats } = get() + if (!currentChat || !workflowId) return false + const chatExists = chats.some((c) => c.id === currentChat.id) + if (!chatExists) { + set({ currentChat: null, messages: [] }) + return false + } + return true + }, + + selectChat: async (chat: CopilotChat) => { + const { isSendingMessage, currentChat, workflowId } = get() + if (!workflowId) { + return + } + if (currentChat && currentChat.id !== chat.id && isSendingMessage) get().abortMessage() + + // Abort in-progress tools and clear diff when changing chats + abortAllInProgressTools(set, get) + try { + useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) + } catch {} + + // Restore plan content and config (mode/model) from selected chat + const planArtifact = chat.planArtifact || '' + const chatConfig = chat.config || {} + const chatMode = chatConfig.mode || get().mode + const chatModel = chatConfig.model || get().selectedModel + + logger.info('[Chat] Restoring chat config', { + chatId: chat.id, + mode: chatMode, + model: chatModel, + hasPlanArtifact: !!planArtifact, + }) + + // Capture previous chat/messages for optimistic background save + const previousChat = currentChat + const previousMessages = get().messages + const previousMode = get().mode + const previousModel = get().selectedModel + + // Optimistically set selected chat and normalize messages for UI + const normalizedMessages = normalizeMessagesForUI(chat.messages || []) + const toolCallsById = buildToolCallsById(normalizedMessages) + + set({ + currentChat: chat, + messages: normalizedMessages, + toolCallsById, + planTodos: [], + showPlanTodos: false, + streamingPlanContent: planArtifact, + mode: chatMode, + selectedModel: chatModel as CopilotStore['selectedModel'], + suppressAutoSelect: false, + }) + + // Background-save the previous chat's latest messages, plan artifact, and config before switching (optimistic) + try { + if (previousChat && previousChat.id !== chat.id) { + const dbMessages = validateMessagesForLLM(previousMessages) + const previousPlanArtifact = get().streamingPlanContent + fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: previousChat.id, + messages: dbMessages, + planArtifact: previousPlanArtifact || null, + config: { + mode: previousMode, + model: previousModel, + }, + }), + }).catch(() => {}) + } + } catch {} + + // Refresh selected chat from server to ensure we have latest messages/tool calls + try { + const response = await fetch(`/api/copilot/chat?workflowId=${workflowId}`) + if (!response.ok) throw new Error(`Failed to fetch latest chat data: ${response.status}`) + const data = await response.json() + if (data.success && Array.isArray(data.chats)) { + const latestChat = data.chats.find((c: CopilotChat) => c.id === chat.id) + if (latestChat) { + const normalizedMessages = normalizeMessagesForUI(latestChat.messages || []) + const toolCallsById = 
buildToolCallsById(normalizedMessages) + + set({ + currentChat: latestChat, + messages: normalizedMessages, + chats: (get().chats || []).map((c: CopilotChat) => + c.id === chat.id ? latestChat : c + ), + toolCallsById, + }) + try { + await get().loadMessageCheckpoints(latestChat.id) + } catch {} + } + } + } catch {} + }, + + createNewChat: async () => { + const { isSendingMessage } = get() + if (isSendingMessage) get().abortMessage() + + // Abort in-progress tools and clear diff on new chat + abortAllInProgressTools(set, get) + try { + useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) + } catch {} + + // Background-save the current chat before clearing (optimistic) + try { + const { currentChat, streamingPlanContent, mode, selectedModel } = get() + if (currentChat) { + const currentMessages = get().messages + const dbMessages = validateMessagesForLLM(currentMessages) + fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: currentChat.id, + messages: dbMessages, + planArtifact: streamingPlanContent || null, + config: { + mode, + model: selectedModel, + }, + }), + }).catch(() => {}) + } + } catch {} + + set({ + currentChat: null, + messages: [], + messageCheckpoints: {}, + planTodos: [], + showPlanTodos: false, + streamingPlanContent: '', + suppressAutoSelect: true, + }) + }, + + deleteChat: async (chatId: string) => { + try { + // Call delete API + const response = await fetch('/api/copilot/chat/delete', { + method: 'DELETE', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ chatId }), + }) + + if (!response.ok) { + throw new Error(`Failed to delete chat: ${response.status}`) + } + + // Remove from local state + set((state) => ({ + chats: state.chats.filter((c) => c.id !== chatId), + // If deleted chat was current, clear it + currentChat: state.currentChat?.id === chatId ? null : state.currentChat, + messages: state.currentChat?.id === chatId ? [] : state.messages, + })) + + logger.info('Chat deleted', { chatId }) + } catch (error) { + logger.error('Failed to delete chat:', error) + throw error + } + }, + + areChatsFresh: (_workflowId: string) => false, + + loadChats: async (_forceRefresh = false) => { + const { workflowId } = get() + + if (!workflowId) { + set({ chats: [], isLoadingChats: false }) + return + } + + // For now always fetch fresh + set({ isLoadingChats: true }) + try { + const url = `/api/copilot/chat?workflowId=${workflowId}` + const response = await fetch(url) + if (!response.ok) { + throw new Error(`Failed to fetch chats: ${response.status}`) + } + const data = await response.json() + if (data.success && Array.isArray(data.chats)) { + const now = new Date() + set({ + chats: data.chats, + isLoadingChats: false, + chatsLastLoadedAt: now, + chatsLoadedForWorkflow: workflowId, + }) + + if (data.chats.length > 0) { + const { currentChat, isSendingMessage, suppressAutoSelect } = get() + const currentChatStillExists = + currentChat && data.chats.some((c: CopilotChat) => c.id === currentChat.id) + + if (currentChatStillExists) { + const updatedCurrentChat = data.chats.find( + (c: CopilotChat) => c.id === currentChat!.id + )! 
+ if (isSendingMessage) { + set({ currentChat: { ...updatedCurrentChat, messages: get().messages } }) + } else { + const normalizedMessages = normalizeMessagesForUI(updatedCurrentChat.messages || []) + + // Restore plan artifact and config from refreshed chat + const refreshedPlanArtifact = updatedCurrentChat.planArtifact || '' + const refreshedConfig = updatedCurrentChat.config || {} + const refreshedMode = refreshedConfig.mode || get().mode + const refreshedModel = refreshedConfig.model || get().selectedModel + const toolCallsById = buildToolCallsById(normalizedMessages) + + set({ + currentChat: updatedCurrentChat, + messages: normalizedMessages, + toolCallsById, + streamingPlanContent: refreshedPlanArtifact, + mode: refreshedMode, + selectedModel: refreshedModel as CopilotStore['selectedModel'], + }) + } + try { + await get().loadMessageCheckpoints(updatedCurrentChat.id) + } catch {} + } else if (!isSendingMessage && !suppressAutoSelect) { + const mostRecentChat: CopilotChat = data.chats[0] + const normalizedMessages = normalizeMessagesForUI(mostRecentChat.messages || []) + + // Restore plan artifact and config from most recent chat + const planArtifact = mostRecentChat.planArtifact || '' + const chatConfig = mostRecentChat.config || {} + const chatMode = chatConfig.mode || get().mode + const chatModel = chatConfig.model || get().selectedModel + + logger.info('[Chat] Auto-selecting most recent chat with config', { + chatId: mostRecentChat.id, + mode: chatMode, + model: chatModel, + hasPlanArtifact: !!planArtifact, + }) + + const toolCallsById = buildToolCallsById(normalizedMessages) + + set({ + currentChat: mostRecentChat, + messages: normalizedMessages, + toolCallsById, + streamingPlanContent: planArtifact, + mode: chatMode, + selectedModel: chatModel as CopilotStore['selectedModel'], + }) + try { + await get().loadMessageCheckpoints(mostRecentChat.id) + } catch {} + } + } else { + set({ currentChat: null, messages: [] }) + } + } else { + throw new Error('Invalid response format') + } + } catch (error) { + set({ + chats: [], + isLoadingChats: false, + chatsLoadedForWorkflow: workflowId, + error: error instanceof Error ? 
error.message : 'Failed to load chats', + }) + } + }, + + // Send a message (streaming only) + sendMessage: async (message: string, options = {}) => { + const { + workflowId, + currentChat, + mode, + revertState, + isSendingMessage, + abortController: activeAbortController, + } = get() + const { + stream = true, + fileAttachments, + contexts, + messageId, + queueIfBusy = true, + } = options as { + stream?: boolean + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + messageId?: string + queueIfBusy?: boolean + } + + if (!workflowId) return + + // If already sending a message, queue this one instead unless bypassing queue + if (isSendingMessage && !activeAbortController) { + logger.warn('[Copilot] sendMessage: stale sending state detected, clearing', { + originalMessageId: messageId, + }) + set({ isSendingMessage: false }) + } else if (isSendingMessage && activeAbortController?.signal.aborted) { + logger.warn('[Copilot] sendMessage: aborted controller detected, clearing', { + originalMessageId: messageId, + }) + set({ isSendingMessage: false, abortController: null }) + } else if (isSendingMessage) { + if (queueIfBusy) { + get().addToQueue(message, { fileAttachments, contexts, messageId }) + logger.info('[Copilot] Message queued (already sending)', { + queueLength: get().messageQueue.length + 1, + originalMessageId: messageId, + }) + return + } + get().abortMessage({ suppressContinueOption: true }) + } + + const nextAbortController = new AbortController() + set({ isSendingMessage: true, error: null, abortController: nextAbortController }) + + const userMessage = createUserMessage(message, fileAttachments, contexts, messageId) + const streamingMessage = createStreamingMessage() + const snapshot = workflowId ? buildCheckpointWorkflowState(workflowId) : null + if (snapshot) { + set((state) => ({ + messageSnapshots: { ...state.messageSnapshots, [userMessage.id]: snapshot }, + })) + } + + get() + .loadSensitiveCredentialIds() + .catch((err) => { + logger.warn('[Copilot] Failed to load sensitive credential IDs', err) + }) + get() + .loadAutoAllowedTools() + .catch((err) => { + logger.warn('[Copilot] Failed to load auto-allowed tools', err) + }) + + let newMessages: CopilotMessage[] + if (revertState) { + const currentMessages = get().messages + newMessages = [...currentMessages, userMessage, streamingMessage] + set({ revertState: null, inputValue: '' }) + } else { + const currentMessages = get().messages + // If messageId is provided, check if it already exists (e.g., from edit flow) + const existingIndex = messageId ? currentMessages.findIndex((m) => m.id === messageId) : -1 + if (existingIndex !== -1) { + // Replace existing message instead of adding new one + newMessages = [...currentMessages.slice(0, existingIndex), userMessage, streamingMessage] + } else { + // Add new messages normally + newMessages = [...currentMessages, userMessage, streamingMessage] + } + } + + const isFirstMessage = get().messages.length === 0 && !currentChat?.title + set((state) => ({ + messages: newMessages, + currentUserMessageId: userMessage.id, + })) + + if (isFirstMessage) { + const optimisticTitle = message.length > 50 ? `${message.substring(0, 47)}...` : message + set((state) => ({ + currentChat: state.currentChat + ? { ...state.currentChat, title: optimisticTitle } + : state.currentChat, + chats: state.currentChat + ? state.chats.map((c) => + c.id === state.currentChat!.id ? 
{ ...c, title: optimisticTitle } : c + ) + : state.chats, + })) + } + + try { + // Debug: log contexts presence before sending + try { + logger.info('sendMessage: preparing request', { + hasContexts: Array.isArray(contexts), + contextsCount: Array.isArray(contexts) ? contexts.length : 0, + contextsPreview: Array.isArray(contexts) + ? contexts.map((c: any) => ({ + kind: c?.kind, + chatId: (c as any)?.chatId, + workflowId: (c as any)?.workflowId, + label: (c as any)?.label, + })) + : undefined, + }) + } catch {} + + // Prepend design document to message if available + const { streamingPlanContent } = get() + let messageToSend = message + if (streamingPlanContent?.trim()) { + messageToSend = `Design Document:\n\n${streamingPlanContent}\n\n==============\n\nUser Query:\n\n${message}` + logger.info('[DesignDocument] Prepending plan content to message', { + planLength: streamingPlanContent.length, + originalMessageLength: message.length, + finalMessageLength: messageToSend.length, + }) + } + + // Call copilot API + const apiMode: CopilotTransportMode = + mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent' + + // Extract slash commands from contexts (lowercase) and filter them out from contexts + // Map UI command IDs to API command IDs (e.g., "actions" -> "superagent") + const uiToApiCommandMap: Record = { actions: 'superagent' } + const commands = contexts + ?.filter((c) => c.kind === 'slash_command' && 'command' in c) + .map((c) => { + const uiCommand = (c as any).command.toLowerCase() + return uiToApiCommandMap[uiCommand] || uiCommand + }) as string[] | undefined + const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command') + + const result = await sendStreamingMessage({ + message: messageToSend, + userMessageId: userMessage.id, + chatId: currentChat?.id, + workflowId: workflowId || undefined, + mode: apiMode, + model: get().selectedModel, + prefetch: get().agentPrefetch, + createNewChat: !currentChat, + stream, + fileAttachments, + contexts: filteredContexts, + commands: commands?.length ? commands : undefined, + abortSignal: nextAbortController.signal, + }) + + if (result.success && result.stream) { + await get().handleStreamingResponse( + result.stream, + streamingMessage.id, + false, + userMessage.id + ) + set({ chatsLastLoadedAt: null, chatsLoadedForWorkflow: null }) + } else { + if (result.error === 'Request was aborted') { + return + } + + // Check for specific status codes and provide custom messages + let errorContent = result.error || 'Failed to send message' + let errorType: + | 'usage_limit' + | 'unauthorized' + | 'forbidden' + | 'rate_limit' + | 'upgrade_required' + | undefined + if (result.status === 401) { + errorContent = + '_Unauthorized request. You need a valid API key to use the copilot. You can get one by going to [sim.ai](https://sim.ai) settings and generating one there._' + errorType = 'unauthorized' + } else if (result.status === 402) { + errorContent = + '_Usage limit exceeded. To continue using this service, upgrade your plan or increase your usage limit to:_' + errorType = 'usage_limit' + } else if (result.status === 403) { + errorContent = + '_Provider config not allowed for non-enterprise users. Please remove the provider config and try again_' + errorType = 'forbidden' + } else if (result.status === 426) { + errorContent = + '_Please upgrade to the latest version of the Sim platform to continue using the copilot._' + errorType = 'upgrade_required' + } else if (result.status === 429) { + errorContent = '_Provider rate limit exceeded. 
Please try again later._' + errorType = 'rate_limit' + } + + const errorMessage = createErrorMessage(streamingMessage.id, errorContent, errorType) + set((state) => ({ + messages: state.messages.map((m) => (m.id === streamingMessage.id ? errorMessage : m)), + error: errorContent, + isSendingMessage: false, + abortController: null, + })) + } + } catch (error) { + if (error instanceof Error && error.name === 'AbortError') return + const errorMessage = createErrorMessage( + streamingMessage.id, + 'Sorry, I encountered an error while processing your message. Please try again.' + ) + set((state) => ({ + messages: state.messages.map((m) => (m.id === streamingMessage.id ? errorMessage : m)), + error: error instanceof Error ? error.message : 'Failed to send message', + isSendingMessage: false, + abortController: null, + })) + } + }, + + // Abort streaming + abortMessage: (options?: { suppressContinueOption?: boolean }) => { + const { abortController, isSendingMessage, messages } = get() + if (!isSendingMessage || !abortController) return + const suppressContinueOption = options?.suppressContinueOption === true + set({ isAborting: true, suppressAbortContinueOption: suppressContinueOption }) + try { + abortController.abort() + stopStreamingUpdates() + const lastMessage = messages[messages.length - 1] + if (lastMessage && lastMessage.role === 'assistant') { + const textContent = + lastMessage.contentBlocks + ?.filter((b) => b.type === 'text') + .map((b: any) => b.content) + .join('') || '' + const nextContentBlocks = suppressContinueOption + ? (lastMessage.contentBlocks ?? []) + : appendContinueOptionBlock( + lastMessage.contentBlocks ? [...lastMessage.contentBlocks] : [] + ) + set((state) => ({ + messages: state.messages.map((msg) => + msg.id === lastMessage.id + ? { + ...msg, + content: suppressContinueOption + ? 
textContent.trim() || 'Message was aborted' + : appendContinueOption(textContent.trim() || 'Message was aborted'), + contentBlocks: nextContentBlocks, + } + : msg + ), + isSendingMessage: false, + isAborting: false, + // Keep abortController so streaming loop can check signal.aborted + // It will be nulled when streaming completes or new message starts + })) + } else { + set({ + isSendingMessage: false, + isAborting: false, + // Keep abortController so streaming loop can check signal.aborted + }) + } + + // Immediately put all in-progress tools into aborted state + abortAllInProgressTools(set, get) + + // Persist whatever contentBlocks/text we have to keep ordering for reloads + const { currentChat, streamingPlanContent, mode, selectedModel } = get() + if (currentChat) { + try { + const currentMessages = get().messages + const dbMessages = validateMessagesForLLM(currentMessages) + fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: currentChat.id, + messages: dbMessages, + planArtifact: streamingPlanContent || null, + config: { + mode, + model: selectedModel, + }, + }), + }).catch(() => {}) + } catch {} + } + } catch { + set({ isSendingMessage: false, isAborting: false }) + } + }, + + // Implicit feedback (send a continuation) - minimal + sendImplicitFeedback: async (implicitFeedback: string) => { + const { workflowId, currentChat, mode, selectedModel } = get() + if (!workflowId) return + const abortController = new AbortController() + set({ isSendingMessage: true, error: null, abortController }) + const newAssistantMessage = createStreamingMessage() + set((state) => ({ messages: [...state.messages, newAssistantMessage] })) + try { + const apiMode: 'ask' | 'agent' | 'plan' = + mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent' + const result = await sendStreamingMessage({ + message: 'Please continue your response.', + chatId: currentChat?.id, + workflowId, + mode: apiMode, + model: selectedModel, + prefetch: get().agentPrefetch, + createNewChat: !currentChat, + stream: true, + implicitFeedback, + abortSignal: abortController.signal, + }) + if (result.success && result.stream) { + await get().handleStreamingResponse(result.stream, newAssistantMessage.id, false) + } else { + if (result.error === 'Request was aborted') return + const errorMessage = createErrorMessage( + newAssistantMessage.id, + result.error || 'Failed to send implicit feedback' + ) + set((state) => ({ + messages: state.messages.map((msg) => + msg.id === newAssistantMessage.id ? errorMessage : msg + ), + error: result.error || 'Failed to send implicit feedback', + isSendingMessage: false, + abortController: null, + })) + } + } catch (error) { + if (error instanceof Error && error.name === 'AbortError') return + const errorMessage = createErrorMessage( + newAssistantMessage.id, + 'Sorry, I encountered an error while processing your feedback. Please try again.' + ) + set((state) => ({ + messages: state.messages.map((msg) => + msg.id === newAssistantMessage.id ? errorMessage : msg + ), + error: error instanceof Error ? 
error.message : 'Failed to send implicit feedback', + isSendingMessage: false, + abortController: null, + })) + } + }, + + // Tool-call related APIs are stubbed for now + setToolCallState: (toolCall: any, newState: any) => { + try { + const id: string | undefined = toolCall?.id + if (!id) return + const map = { ...get().toolCallsById } + const current = map[id] + if (!current) return + // Preserve rejected state from being overridden + if ( + isRejectedState(current.state) && + (newState === 'success' || newState === (ClientToolCallState as any).success) + ) { + return + } + let norm: ClientToolCallState = current.state + if (newState === 'executing') norm = ClientToolCallState.executing + else if (newState === 'errored' || newState === 'error') norm = ClientToolCallState.error + else if (newState === 'rejected') norm = ClientToolCallState.rejected + else if (newState === 'pending') norm = ClientToolCallState.pending + else if (newState === 'success' || newState === 'accepted') + norm = ClientToolCallState.success + else if (newState === 'aborted') norm = ClientToolCallState.aborted + else if (typeof newState === 'number') norm = newState as unknown as ClientToolCallState + map[id] = { + ...current, + state: norm, + display: resolveToolDisplay(current.name, norm, id, current.params), + } + set({ toolCallsById: map }) + } catch {} + }, + + updateToolCallParams: (toolCallId: string, params: Record) => { + try { + if (!toolCallId) return + const map = { ...get().toolCallsById } + const current = map[toolCallId] + if (!current) return + const updatedParams = { ...current.params, ...params } + map[toolCallId] = { + ...current, + params: updatedParams, + display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams), + } + set({ toolCallsById: map }) + } catch {} + }, + updatePreviewToolCallState: ( + toolCallState: 'accepted' | 'rejected' | 'error', + toolCallId?: string + ) => { + const stateMap: Record = { + accepted: ClientToolCallState.success, + rejected: ClientToolCallState.rejected, + error: ClientToolCallState.error, + } + const targetState = stateMap[toolCallState] || ClientToolCallState.success + const { toolCallsById } = get() + // Determine target tool + let id = toolCallId + if (!id) { + // Prefer the latest assistant message's build/edit tool_call + const messages = get().messages + outer: for (let mi = messages.length - 1; mi >= 0; mi--) { + const m = messages[mi] + if (m.role !== 'assistant' || !m.contentBlocks) continue + const blocks = m.contentBlocks as any[] + for (let bi = blocks.length - 1; bi >= 0; bi--) { + const b = blocks[bi] + if (b?.type === 'tool_call') { + const tn = b.toolCall?.name + if (tn === 'edit_workflow') { + id = b.toolCall?.id + break outer + } + } + } + } + // Fallback to map if not found in messages + if (!id) { + const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow') + id = candidates.length ? 
candidates[candidates.length - 1].id : undefined + } + } + if (!id) return + const current = toolCallsById[id] + if (!current) return + // Do not override a rejected tool with success + if (isRejectedState(current.state) && targetState === (ClientToolCallState as any).success) { + return + } + + // Update store map + const updatedMap = { ...toolCallsById } + const updatedDisplay = resolveToolDisplay(current.name, targetState, id, current.params) + updatedMap[id] = { + ...current, + state: targetState, + display: updatedDisplay, + } + set({ toolCallsById: updatedMap }) + + // Update inline content block in the latest assistant message + set((s) => { + const messages = [...s.messages] + for (let mi = messages.length - 1; mi >= 0; mi--) { + const m = messages[mi] + if (m.role !== 'assistant' || !m.contentBlocks) continue + let changed = false + const blocks = m.contentBlocks.map((b: any) => { + if (b.type === 'tool_call' && b.toolCall?.id === id) { + changed = true + const prev = b.toolCall || {} + return { + ...b, + toolCall: { + ...prev, + id, + name: current.name, + state: targetState, + display: updatedDisplay, + params: current.params, + }, + } + } + return b + }) + if (changed) { + messages[mi] = { ...m, contentBlocks: blocks } + break + } + } + return { messages } + }) + + // Notify backend mark-complete to finalize tool server-side + try { + fetch('/api/copilot/tools/mark-complete', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + id, + name: current.name, + status: + targetState === ClientToolCallState.success + ? 200 + : targetState === ClientToolCallState.rejected + ? 409 + : 500, + message: toolCallState, + }), + }).catch(() => {}) + } catch {} + }, + + sendDocsMessage: async (query: string) => { + await get().sendMessage(query) + }, + + saveChatMessages: async (_chatId: string) => {}, + + loadCheckpoints: async (_chatId: string) => set({ checkpoints: [] }), + + loadMessageCheckpoints: async (chatId: string) => { + const { workflowId } = get() + if (!workflowId) return + set({ isLoadingCheckpoints: true, checkpointError: null }) + try { + const response = await fetch(`/api/copilot/checkpoints?chatId=${chatId}`) + if (!response.ok) throw new Error(`Failed to load checkpoints: ${response.statusText}`) + const data = await response.json() + if (data.success && Array.isArray(data.checkpoints)) { + const grouped = data.checkpoints.reduce((acc: Record, cp: any) => { + const key = cp.messageId || '__no_message__' + acc[key] = acc[key] || [] + acc[key].push(cp) + return acc + }, {}) + set({ messageCheckpoints: grouped, isLoadingCheckpoints: false }) + } else { + throw new Error('Invalid checkpoints response') + } + } catch (error) { + set({ + isLoadingCheckpoints: false, + checkpointError: error instanceof Error ? 
error.message : 'Failed to load checkpoints', + }) + } + }, + + // Revert to a specific checkpoint and apply state locally + revertToCheckpoint: async (checkpointId: string) => { + const { workflowId } = get() + if (!workflowId) return + set({ isRevertingCheckpoint: true, checkpointError: null }) + try { + const { messageCheckpoints } = get() + const checkpointMessageId = Object.entries(messageCheckpoints).find(([, cps]) => + (cps || []).some((cp: any) => cp?.id === checkpointId) + )?.[0] + const response = await fetch('/api/copilot/checkpoints/revert', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ checkpointId }), + }) + if (!response.ok) { + const errorText = await response.text().catch(() => '') + throw new Error(errorText || `Failed to revert: ${response.statusText}`) + } + const result = await response.json() + const reverted = result?.checkpoint?.workflowState || null + if (reverted) { + // Clear any active diff preview + try { + useWorkflowDiffStore.getState().clearDiff() + } catch {} + + // Apply to main workflow store + useWorkflowStore.setState({ + blocks: reverted.blocks || {}, + edges: reverted.edges || [], + loops: reverted.loops || {}, + parallels: reverted.parallels || {}, + lastSaved: reverted.lastSaved || Date.now(), + deploymentStatuses: reverted.deploymentStatuses || {}, + }) + + // Extract and apply subblock values + const values: Record> = {} + Object.entries(reverted.blocks || {}).forEach(([blockId, block]: [string, any]) => { + values[blockId] = {} + Object.entries((block as any).subBlocks || {}).forEach( + ([subId, sub]: [string, any]) => { + values[blockId][subId] = (sub as any)?.value + } + ) + }) + const subState = useSubBlockStore.getState() + useSubBlockStore.setState({ + workflowValues: { + ...subState.workflowValues, + [workflowId]: values, + }, + }) + } + if (checkpointMessageId) { + const { messageCheckpoints: currentCheckpoints } = get() + const updatedCheckpoints = { ...currentCheckpoints, [checkpointMessageId]: [] } + set({ messageCheckpoints: updatedCheckpoints }) + } + set({ isRevertingCheckpoint: false }) + } catch (error) { + set({ + isRevertingCheckpoint: false, + checkpointError: error instanceof Error ? 
error.message : 'Failed to revert checkpoint', + }) + throw error + } + }, + getCheckpointsForMessage: (messageId: string) => { + const { messageCheckpoints } = get() + return messageCheckpoints[messageId] || [] + }, + saveMessageCheckpoint: async (messageId: string) => { + if (!messageId) return false + return saveMessageCheckpoint(messageId, get, set) + }, + + // Handle streaming response + handleStreamingResponse: async ( + stream: ReadableStream, + assistantMessageId: string, + isContinuation = false, + triggerUserMessageId?: string + ) => { + const reader = stream.getReader() + const decoder = new TextDecoder() + const startTimeMs = Date.now() + + const context: StreamingContext = { + messageId: assistantMessageId, + accumulatedContent: new StringBuilder(), + contentBlocks: [], + currentTextBlock: null, + isInThinkingBlock: false, + currentThinkingBlock: null, + isInDesignWorkflowBlock: false, + designWorkflowContent: '', + pendingContent: '', + doneEventCount: 0, + subAgentContent: {}, + subAgentToolCalls: {}, + subAgentBlocks: {}, + } + + if (isContinuation) { + const { messages } = get() + const existingMessage = messages.find((m) => m.id === assistantMessageId) + if (existingMessage) { + if (existingMessage.content) context.accumulatedContent.append(existingMessage.content) + context.contentBlocks = existingMessage.contentBlocks + ? [...existingMessage.contentBlocks] + : [] + } + } + + const timeoutId = setTimeout(() => { + logger.warn('Stream timeout reached, completing response') + reader.cancel() + }, 600000) + + try { + for await (const data of parseSSEStream(reader, decoder)) { + const { abortController } = get() + if (abortController?.signal.aborted) { + context.wasAborted = true + const { suppressAbortContinueOption } = get() + context.suppressContinueOption = suppressAbortContinueOption === true + if (suppressAbortContinueOption) { + set({ suppressAbortContinueOption: false }) + } + context.pendingContent = '' + finalizeThinkingBlock(context) + stopStreamingUpdates() + reader.cancel() + break + } + + // Log SSE events for debugging + logger.info('[SSE] Received event', { + type: data.type, + hasSubAgent: !!data.subagent, + subagent: data.subagent, + dataPreview: + typeof data.data === 'string' + ? 
data.data.substring(0, 100) + : JSON.stringify(data.data)?.substring(0, 100), + }) + + // Handle subagent_start to track parent tool call + if (data.type === 'subagent_start') { + const toolCallId = data.data?.tool_call_id + if (toolCallId) { + context.subAgentParentToolCallId = toolCallId + // Mark the parent tool call as streaming + const { toolCallsById } = get() + const parentToolCall = toolCallsById[toolCallId] + if (parentToolCall) { + const updatedToolCall: CopilotToolCall = { + ...parentToolCall, + subAgentStreaming: true, + } + const updatedMap = { ...toolCallsById, [toolCallId]: updatedToolCall } + set({ toolCallsById: updatedMap }) + } + logger.info('[SSE] Subagent session started', { + subagent: data.subagent, + parentToolCallId: toolCallId, + }) + } + continue + } + + // Handle subagent_end to finalize subagent content + if (data.type === 'subagent_end') { + const parentToolCallId = context.subAgentParentToolCallId + if (parentToolCallId) { + // Mark subagent streaming as complete + const { toolCallsById } = get() + const parentToolCall = toolCallsById[parentToolCallId] + if (parentToolCall) { + const updatedToolCall: CopilotToolCall = { + ...parentToolCall, + subAgentContent: context.subAgentContent[parentToolCallId] || '', + subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] || [], + subAgentBlocks: context.subAgentBlocks[parentToolCallId] || [], + subAgentStreaming: false, // Done streaming + } + const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall } + set({ toolCallsById: updatedMap }) + logger.info('[SSE] Subagent session ended', { + subagent: data.subagent, + parentToolCallId, + contentLength: context.subAgentContent[parentToolCallId]?.length || 0, + toolCallCount: context.subAgentToolCalls[parentToolCallId]?.length || 0, + }) + } + } + context.subAgentParentToolCallId = undefined + continue + } + + // Check if this is a subagent event (has subagent field) + if (data.subagent) { + const parentToolCallId = context.subAgentParentToolCallId + if (!parentToolCallId) { + logger.warn('[SSE] Subagent event without parent tool call ID', { + type: data.type, + subagent: data.subagent, + }) + continue + } + + logger.info('[SSE] Processing subagent event', { + type: data.type, + subagent: data.subagent, + parentToolCallId, + hasHandler: !!subAgentSSEHandlers[data.type], + }) + + const subAgentHandler = subAgentSSEHandlers[data.type] + if (subAgentHandler) { + await subAgentHandler(data, context, get, set) + } else { + logger.warn('[SSE] No handler for subagent event type', { type: data.type }) + } + // Skip regular handlers for subagent events + if (context.streamComplete) break + continue + } + + const handler = sseHandlers[data.type] || sseHandlers.default + await handler(data, context, get, set) + if (context.streamComplete) break + } + + if (!context.wasAborted && sseHandlers.stream_end) { + sseHandlers.stream_end({}, context, get, set) + } + + if (streamingUpdateRAF !== null) { + cancelAnimationFrame(streamingUpdateRAF) + streamingUpdateRAF = null + } + streamingUpdateQueue.clear() + + let sanitizedContentBlocks: any[] = [] + if (context.contentBlocks && context.contentBlocks.length > 0) { + const optimizedBlocks = createOptimizedContentBlocks(context.contentBlocks) + sanitizedContentBlocks = optimizedBlocks.map((block: any) => + block.type === TEXT_BLOCK_TYPE && typeof block.content === 'string' + ? 
{ ...block, content: stripTodoTags(block.content) } + : block + ) + } + if (context.wasAborted && !context.suppressContinueOption) { + sanitizedContentBlocks = appendContinueOptionBlock(sanitizedContentBlocks) + } + + if (context.contentBlocks) { + context.contentBlocks.forEach((block) => { + if (block.type === TEXT_BLOCK_TYPE || block.type === THINKING_BLOCK_TYPE) { + contentBlockPool.release(block) + } + }) + } + + const finalContent = stripTodoTags(context.accumulatedContent.toString()) + const finalContentWithOptions = + context.wasAborted && !context.suppressContinueOption + ? appendContinueOption(finalContent) + : finalContent + set((state) => { + const snapshotId = state.currentUserMessageId + const nextSnapshots = + snapshotId && state.messageSnapshots[snapshotId] + ? (() => { + const updated = { ...state.messageSnapshots } + delete updated[snapshotId] + return updated + })() + : state.messageSnapshots + return { + messages: state.messages.map((msg) => + msg.id === assistantMessageId + ? { + ...msg, + content: finalContentWithOptions, + contentBlocks: sanitizedContentBlocks, + } + : msg + ), + isSendingMessage: false, + isAborting: false, + abortController: null, + currentUserMessageId: null, + messageSnapshots: nextSnapshots, + } + }) + + if (context.newChatId && !get().currentChat) { + await get().handleNewChatCreation(context.newChatId) + } + + // Process next message in queue if any + const nextInQueue = get().messageQueue[0] + if (nextInQueue) { + // Use originalMessageId if available (from edit/resend), otherwise use queue entry id + const messageIdToUse = nextInQueue.originalMessageId || nextInQueue.id + logger.info('[Queue] Processing next queued message', { + id: nextInQueue.id, + originalMessageId: nextInQueue.originalMessageId, + messageIdToUse, + queueLength: get().messageQueue.length, + }) + // Remove from queue and send + get().removeFromQueue(nextInQueue.id) + // Use setTimeout to avoid blocking the current execution + setTimeout(() => { + get().sendMessage(nextInQueue.content, { + stream: true, + fileAttachments: nextInQueue.fileAttachments, + contexts: nextInQueue.contexts, + messageId: messageIdToUse, + }) + }, 100) + } + + // Persist full message state (including contentBlocks), plan artifact, and config to database + const { currentChat, streamingPlanContent, mode, selectedModel } = get() + if (currentChat) { + try { + const currentMessages = get().messages + // Debug: Log what we're about to serialize + const lastMsg = currentMessages[currentMessages.length - 1] + if (lastMsg?.role === 'assistant') { + logger.info('[Stream Done] About to serialize - last message state', { + id: lastMsg.id, + contentLength: lastMsg.content?.length || 0, + hasContentBlocks: !!lastMsg.contentBlocks, + contentBlockCount: lastMsg.contentBlocks?.length || 0, + contentBlockTypes: (lastMsg.contentBlocks as any[])?.map((b) => b?.type) || [], + }) + } + const dbMessages = validateMessagesForLLM(currentMessages) + const config = { + mode, + model: selectedModel, + } + + const saveResponse = await fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: currentChat.id, + messages: dbMessages, + planArtifact: streamingPlanContent || null, + config, + }), + }) + + if (!saveResponse.ok) { + const errorText = await saveResponse.text().catch(() => '') + logger.error('[Stream Done] Failed to save messages to DB', { + status: saveResponse.status, + error: errorText, + }) + } else { + logger.info('[Stream 
Done] Successfully saved messages to DB', { + messageCount: dbMessages.length, + }) + } + + // Update local chat object with plan artifact and config + set({ + currentChat: { + ...currentChat, + planArtifact: streamingPlanContent || null, + config, + }, + }) + } catch (err) { + logger.error('[Stream Done] Exception saving messages', { error: String(err) }) + } + } + + // Post copilot_stats record (input/output tokens can be null for now) + try { + // Removed: stats sending now occurs only on accept/reject with minimal payload + } catch {} + + // Invalidate subscription queries to update usage + setTimeout(() => { + const queryClient = getQueryClient() + queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }) + }, 1000) + } finally { + clearTimeout(timeoutId) + } + }, + + // Handle new chat creation from stream + handleNewChatCreation: async (newChatId: string) => { + const { mode, selectedModel, streamingPlanContent } = get() + const newChat: CopilotChat = { + id: newChatId, + title: null, + model: selectedModel, + messages: get().messages, + messageCount: get().messages.length, + planArtifact: streamingPlanContent || null, + config: { + mode, + model: selectedModel, + }, + createdAt: new Date(), + updatedAt: new Date(), + } + // Abort any in-progress tools and clear diff on new chat creation + abortAllInProgressTools(set, get) + try { + useWorkflowDiffStore.getState().clearDiff() + } catch {} + + set({ + currentChat: newChat, + chats: [newChat, ...(get().chats || [])], + chatsLastLoadedAt: null, + chatsLoadedForWorkflow: null, + planTodos: [], + showPlanTodos: false, + suppressAutoSelect: false, + }) + }, + + // Utilities + clearError: () => set({ error: null }), + clearSaveError: () => set({ saveError: null }), + clearCheckpointError: () => set({ checkpointError: null }), + retrySave: async (_chatId: string) => {}, + + cleanup: () => { + const { isSendingMessage } = get() + if (isSendingMessage) get().abortMessage() + if (streamingUpdateRAF !== null) { + cancelAnimationFrame(streamingUpdateRAF) + streamingUpdateRAF = null + } + streamingUpdateQueue.clear() + // Clear any diff on cleanup + try { + useWorkflowDiffStore.getState().clearDiff() + } catch {} + }, + + reset: () => { + get().cleanup() + // Abort in-progress tools prior to reset + abortAllInProgressTools(set, get) + set(initialState) + }, + + // Input controls + setInputValue: (value: string) => set({ inputValue: value }), + clearRevertState: () => set({ revertState: null }), + + // Todo list (UI only) + setPlanTodos: (todos) => set({ planTodos: todos, showPlanTodos: true }), + updatePlanTodoStatus: (id, status) => { + set((state) => { + const updated = state.planTodos.map((t) => + t.id === id + ? 
{ ...t, completed: status === 'completed', executing: status === 'executing' } + : t + ) + return { planTodos: updated } + }) + }, + closePlanTodos: () => set({ showPlanTodos: false }), + + clearPlanArtifact: async () => { + const { currentChat } = get() + + // Clear from local state + set({ streamingPlanContent: '' }) + + // Update database if we have a current chat + if (currentChat) { + try { + const currentMessages = get().messages + const dbMessages = validateMessagesForLLM(currentMessages) + const { mode, selectedModel } = get() + + await fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: currentChat.id, + messages: dbMessages, + planArtifact: null, + config: { + mode, + model: selectedModel, + }, + }), + }) + + // Update local chat object + set({ + currentChat: { + ...currentChat, + planArtifact: null, + }, + }) + + logger.info('[PlanArtifact] Cleared plan artifact', { chatId: currentChat.id }) + } catch (error) { + logger.error('[PlanArtifact] Failed to clear plan artifact', error) + } + } + }, + + savePlanArtifact: async (content: string) => { + const { currentChat } = get() + + // Update local state + set({ streamingPlanContent: content }) + + // Update database if we have a current chat + if (currentChat) { + try { + const currentMessages = get().messages + const dbMessages = validateMessagesForLLM(currentMessages) + const { mode, selectedModel } = get() + + await fetch('/api/copilot/chat/update-messages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: currentChat.id, + messages: dbMessages, + planArtifact: content, + config: { + mode, + model: selectedModel, + }, + }), + }) + + // Update local chat object + set({ + currentChat: { + ...currentChat, + planArtifact: content, + }, + }) + + logger.info('[PlanArtifact] Saved plan artifact', { + chatId: currentChat.id, + contentLength: content.length, + }) + } catch (error) { + logger.error('[PlanArtifact] Failed to save plan artifact', error) + } + } + }, + + setSelectedModel: async (model) => { + set({ selectedModel: model }) + }, + setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }), + setEnabledModels: (models) => set({ enabledModels: models }), + + executeIntegrationTool: async (toolCallId: string) => { + const { toolCallsById, workflowId } = get() + const toolCall = toolCallsById[toolCallId] + if (!toolCall || !workflowId) return + + const { id, name, params } = toolCall + + // Guard against double execution - skip if already executing or in terminal state + if (toolCall.state === ClientToolCallState.executing || isTerminalState(toolCall.state)) { + logger.info('[executeIntegrationTool] Skipping - already executing or terminal', { + id, + name, + state: toolCall.state, + }) + return + } + + // Set to executing state + const executingMap = { ...get().toolCallsById } + executingMap[id] = { + ...executingMap[id], + state: ClientToolCallState.executing, + display: resolveToolDisplay(name, ClientToolCallState.executing, id, params), + } + set({ toolCallsById: executingMap }) + logger.info('[toolCallsById] pending → executing (integration tool)', { id, name }) + + try { + const res = await fetch('/api/copilot/execute-tool', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + toolCallId: id, + toolName: name, + arguments: params || {}, + workflowId, + }), + }) + + const result = await res.json() + const success = 
result.success && result.result?.success + const completeMap = { ...get().toolCallsById } + + // Do not override terminal review/rejected + if ( + isRejectedState(completeMap[id]?.state) || + isReviewState(completeMap[id]?.state) || + isBackgroundState(completeMap[id]?.state) + ) { + return + } + + completeMap[id] = { + ...completeMap[id], + state: success ? ClientToolCallState.success : ClientToolCallState.error, + display: resolveToolDisplay( + name, + success ? ClientToolCallState.success : ClientToolCallState.error, + id, + params + ), + } + set({ toolCallsById: completeMap }) + logger.info(`[toolCallsById] executing → ${success ? 'success' : 'error'} (integration)`, { + id, + name, + result, + }) + + // Notify backend tool mark-complete endpoint + try { + await fetch('/api/copilot/tools/mark-complete', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + id, + name: name || 'unknown_tool', + status: success ? 200 : 500, + message: success + ? result.result?.output?.content + : result.result?.error || result.error || 'Tool execution failed', + data: success + ? result.result?.output + : { + error: result.result?.error || result.error, + output: result.result?.output, + }, + }), + }) + } catch {} + } catch (e) { + const errorMap = { ...get().toolCallsById } + // Do not override terminal review/rejected + if ( + isRejectedState(errorMap[id]?.state) || + isReviewState(errorMap[id]?.state) || + isBackgroundState(errorMap[id]?.state) + ) { + return + } + errorMap[id] = { + ...errorMap[id], + state: ClientToolCallState.error, + display: resolveToolDisplay(name, ClientToolCallState.error, id, params), + } + set({ toolCallsById: errorMap }) + logger.error('Integration tool execution failed', { id, name, error: e }) + } + }, + + skipIntegrationTool: (toolCallId: string) => { + const { toolCallsById } = get() + const toolCall = toolCallsById[toolCallId] + if (!toolCall) return + + const { id, name, params } = toolCall + + // Set to rejected state + const rejectedMap = { ...get().toolCallsById } + rejectedMap[id] = { + ...rejectedMap[id], + state: ClientToolCallState.rejected, + display: resolveToolDisplay(name, ClientToolCallState.rejected, id, params), + } + set({ toolCallsById: rejectedMap }) + logger.info('[toolCallsById] pending → rejected (integration tool skipped)', { id, name }) + + // Notify backend tool mark-complete endpoint with skip status + fetch('/api/copilot/tools/mark-complete', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + id, + name: name || 'unknown_tool', + status: 200, + message: 'Tool execution skipped by user', + data: { skipped: true }, + }), + }).catch(() => {}) + }, + + loadAutoAllowedTools: async () => { + try { + logger.info('[AutoAllowedTools] Loading from API...') + const res = await fetch('/api/copilot/auto-allowed-tools') + logger.info('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok }) + if (res.ok) { + const data = await res.json() + const tools = data.autoAllowedTools || [] + set({ autoAllowedTools: tools }) + logger.info('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools }) + } else { + logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status }) + } + } catch (err) { + logger.error('[AutoAllowedTools] Failed to load', { error: err }) + } + }, + + addAutoAllowedTool: async (toolId: string) => { + try { + logger.info('[AutoAllowedTools] Adding tool...', { toolId }) + const res = await 
fetch('/api/copilot/auto-allowed-tools', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ toolId }), + }) + logger.info('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok }) + if (res.ok) { + const data = await res.json() + logger.info('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools }) + set({ autoAllowedTools: data.autoAllowedTools || [] }) + logger.info('[AutoAllowedTools] Added tool to store', { toolId }) + + // Auto-execute all pending tools of the same type + const { toolCallsById, executeIntegrationTool } = get() + const pendingToolCalls = Object.values(toolCallsById).filter( + (tc) => tc.name === toolId && tc.state === ClientToolCallState.pending + ) + if (pendingToolCalls.length > 0) { + const isIntegrationTool = !CLASS_TOOL_METADATA[toolId] + logger.info('[AutoAllowedTools] Auto-executing pending tools', { + toolId, + count: pendingToolCalls.length, + isIntegrationTool, + }) + for (const tc of pendingToolCalls) { + if (isIntegrationTool) { + // Integration tools use executeIntegrationTool + executeIntegrationTool(tc.id).catch((err) => { + logger.error('[AutoAllowedTools] Auto-execute pending integration tool failed', { + toolCallId: tc.id, + toolId, + error: err, + }) + }) + } else { + // Client tools with interrupts use handleAccept + const inst = getClientTool(tc.id) as any + if (inst && typeof inst.handleAccept === 'function') { + Promise.resolve() + .then(() => inst.handleAccept(tc.params || {})) + .catch((err: any) => { + logger.error('[AutoAllowedTools] Auto-execute pending client tool failed', { + toolCallId: tc.id, + toolId, + error: err, + }) + }) + } + } + } + } + } + } catch (err) { + logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err }) + } + }, + + removeAutoAllowedTool: async (toolId: string) => { + try { + const res = await fetch( + `/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolId)}`, + { + method: 'DELETE', + } + ) + if (res.ok) { + const data = await res.json() + set({ autoAllowedTools: data.autoAllowedTools || [] }) + logger.info('[AutoAllowedTools] Removed tool', { toolId }) + } + } catch (err) { + logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err }) + } + }, + + isToolAutoAllowed: (toolId: string) => { + const { autoAllowedTools } = get() + return autoAllowedTools.includes(toolId) + }, + + // Credential masking + loadSensitiveCredentialIds: async () => { + try { + const res = await fetch('/api/copilot/execute-copilot-server-tool', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ toolName: 'get_credentials', payload: {} }), + }) + if (!res.ok) { + logger.warn('[loadSensitiveCredentialIds] Failed to fetch credentials', { + status: res.status, + }) + return + } + const json = await res.json() + // Credentials are at result.oauth.connected.credentials + const credentials = json?.result?.oauth?.connected?.credentials || [] + logger.info('[loadSensitiveCredentialIds] Response', { + hasResult: !!json?.result, + credentialCount: credentials.length, + }) + const ids = new Set() + for (const cred of credentials) { + if (cred?.id) { + ids.add(cred.id) + } + } + set({ sensitiveCredentialIds: ids }) + logger.info('[loadSensitiveCredentialIds] Loaded credential IDs', { + count: ids.size, + }) + } catch (err) { + logger.warn('[loadSensitiveCredentialIds] Error loading credentials', err) + } + }, + + maskCredentialValue: (value: string) => { + const { 
sensitiveCredentialIds } = get() + if (!value || sensitiveCredentialIds.size === 0) return value + + let masked = value + // Sort by length descending to mask longer IDs first + const sortedIds = Array.from(sensitiveCredentialIds).sort((a, b) => b.length - a.length) + for (const id of sortedIds) { + if (id && masked.includes(id)) { + masked = masked.split(id).join('••••••••') + } + } + return masked + }, + + // Message queue actions + addToQueue: (message, options) => { + const queuedMessage: import('./types').QueuedMessage = { + id: crypto.randomUUID(), + content: message, + fileAttachments: options?.fileAttachments, + contexts: options?.contexts, + queuedAt: Date.now(), + originalMessageId: options?.messageId, + } + set({ messageQueue: [...get().messageQueue, queuedMessage] }) + logger.info('[Queue] Message added to queue', { + id: queuedMessage.id, + originalMessageId: options?.messageId, + queueLength: get().messageQueue.length, + }) + }, + + removeFromQueue: (id) => { + set({ messageQueue: get().messageQueue.filter((m) => m.id !== id) }) + logger.info('[Queue] Message removed from queue', { + id, + queueLength: get().messageQueue.length, + }) + }, + + moveUpInQueue: (id) => { + const queue = [...get().messageQueue] + const index = queue.findIndex((m) => m.id === id) + if (index > 0) { + const item = queue[index] + queue.splice(index, 1) + queue.splice(index - 1, 0, item) + set({ messageQueue: queue }) + logger.info('[Queue] Message moved up in queue', { id, newIndex: index - 1 }) + } + }, + + sendNow: async (id) => { + const queue = get().messageQueue + const message = queue.find((m) => m.id === id) + if (!message) return + + // Remove from queue first + get().removeFromQueue(id) + + // If currently sending, abort and send this one + const { isSendingMessage } = get() + if (isSendingMessage) { + get().abortMessage({ suppressContinueOption: true }) + // Wait a tick for abort to complete + await new Promise((resolve) => setTimeout(resolve, 50)) + } + + // Use originalMessageId if available (from edit/resend), otherwise use queue entry id + const messageIdToUse = message.originalMessageId || message.id + + // Send the message + await get().sendMessage(message.content, { + stream: true, + fileAttachments: message.fileAttachments, + contexts: message.contexts, + messageId: messageIdToUse, + }) + }, + + clearQueue: () => { + set({ messageQueue: [] }) + logger.info('[Queue] Queue cleared') + }, + })) +) + +// Sync class-based tool instance state changes back into the store map +try { + registerToolStateSync((toolCallId: string, nextState: any) => { + const state = useCopilotStore.getState() + const current = state.toolCallsById[toolCallId] + if (!current) return + let mapped: ClientToolCallState = current.state + if (nextState === 'executing') mapped = ClientToolCallState.executing + else if (nextState === 'pending') mapped = ClientToolCallState.pending + else if (nextState === 'success' || nextState === 'accepted') + mapped = ClientToolCallState.success + else if (nextState === 'error' || nextState === 'errored') mapped = ClientToolCallState.error + else if (nextState === 'rejected') mapped = ClientToolCallState.rejected + else if (nextState === 'aborted') mapped = ClientToolCallState.aborted + else if (nextState === 'review') mapped = (ClientToolCallState as any).review + else if (nextState === 'background') mapped = (ClientToolCallState as any).background + else if (typeof nextState === 'number') mapped = nextState as unknown as ClientToolCallState + + // Store-authoritative gating: 
ignore invalid/downgrade transitions + const isTerminal = (s: ClientToolCallState) => + s === ClientToolCallState.success || + s === ClientToolCallState.error || + s === ClientToolCallState.rejected || + s === ClientToolCallState.aborted || + (s as any) === (ClientToolCallState as any).review || + (s as any) === (ClientToolCallState as any).background + + // If we've already reached a terminal state, ignore any further non-terminal updates + if (isTerminal(current.state) && !isTerminal(mapped)) { + return + } + // Prevent downgrades (executing → pending, pending → generating) + if ( + (current.state === ClientToolCallState.executing && mapped === ClientToolCallState.pending) || + (current.state === ClientToolCallState.pending && + mapped === (ClientToolCallState as any).generating) + ) { + return + } + // No-op if unchanged + if (mapped === current.state) return + const updated = { + ...state.toolCallsById, + [toolCallId]: { + ...current, + state: mapped, + display: resolveToolDisplay(current.name, mapped, toolCallId, current.params), + }, + } + useCopilotStore.setState({ toolCallsById: updated }) + }) +} catch {} diff --git a/.tmp_old_tools_registry.ts b/.tmp_old_tools_registry.ts new file mode 100644 index 0000000000..b6941d4ae0 --- /dev/null +++ b/.tmp_old_tools_registry.ts @@ -0,0 +1,3480 @@ +import { + a2aCancelTaskTool, + a2aDeletePushNotificationTool, + a2aGetAgentCardTool, + a2aGetPushNotificationTool, + a2aGetTaskTool, + a2aResubscribeTool, + a2aSendMessageTool, + a2aSetPushNotificationTool, +} from '@/tools/a2a' +import { + ahrefsBacklinksStatsTool, + ahrefsBacklinksTool, + ahrefsBrokenBacklinksTool, + ahrefsDomainRatingTool, + ahrefsKeywordOverviewTool, + ahrefsOrganicKeywordsTool, + ahrefsReferringDomainsTool, + ahrefsTopPagesTool, +} from '@/tools/ahrefs' +import { + airtableCreateRecordsTool, + airtableGetRecordTool, + airtableListRecordsTool, + airtableUpdateRecordTool, +} from '@/tools/airtable' +import { airweaveSearchTool } from '@/tools/airweave' +import { apifyRunActorAsyncTool, apifyRunActorSyncTool } from '@/tools/apify' +import { + apolloAccountBulkCreateTool, + apolloAccountBulkUpdateTool, + apolloAccountCreateTool, + apolloAccountSearchTool, + apolloAccountUpdateTool, + apolloContactBulkCreateTool, + apolloContactBulkUpdateTool, + apolloContactCreateTool, + apolloContactSearchTool, + apolloContactUpdateTool, + apolloEmailAccountsTool, + apolloOpportunityCreateTool, + apolloOpportunityGetTool, + apolloOpportunitySearchTool, + apolloOpportunityUpdateTool, + apolloOrganizationBulkEnrichTool, + apolloOrganizationEnrichTool, + apolloOrganizationSearchTool, + apolloPeopleBulkEnrichTool, + apolloPeopleEnrichTool, + apolloPeopleSearchTool, + apolloSequenceAddContactsTool, + apolloSequenceSearchTool, + apolloTaskCreateTool, + apolloTaskSearchTool, +} from '@/tools/apollo' +import { arxivGetAuthorPapersTool, arxivGetPaperTool, arxivSearchTool } from '@/tools/arxiv' +import { + asanaAddCommentTool, + asanaCreateTaskTool, + asanaGetProjectsTool, + asanaGetTaskTool, + asanaSearchTasksTool, + asanaUpdateTaskTool, +} from '@/tools/asana' +import { browserUseRunTaskTool } from '@/tools/browser_use' +import { + calcomCancelBookingTool, + calcomConfirmBookingTool, + calcomCreateBookingTool, + calcomCreateEventTypeTool, + calcomCreateScheduleTool, + calcomDeclineBookingTool, + calcomDeleteEventTypeTool, + calcomDeleteScheduleTool, + calcomGetBookingTool, + calcomGetDefaultScheduleTool, + calcomGetEventTypeTool, + calcomGetScheduleTool, + calcomGetSlotsTool, + calcomListBookingsTool, 
+ calcomListEventTypesTool, + calcomListSchedulesTool, + calcomRescheduleBookingTool, + calcomUpdateEventTypeTool, + calcomUpdateScheduleTool, +} from '@/tools/calcom' +import { + calendlyCancelEventTool, + calendlyCreateWebhookTool, + calendlyDeleteWebhookTool, + calendlyGetCurrentUserTool, + calendlyGetEventTypeTool, + calendlyGetScheduledEventTool, + calendlyListEventInviteesTool, + calendlyListEventTypesTool, + calendlyListScheduledEventsTool, + calendlyListWebhooksTool, +} from '@/tools/calendly' +import { clayPopulateTool } from '@/tools/clay' +import { + clerkCreateOrganizationTool, + clerkCreateUserTool, + clerkDeleteUserTool, + clerkGetOrganizationTool, + clerkGetSessionTool, + clerkGetUserTool, + clerkListOrganizationsTool, + clerkListSessionsTool, + clerkListUsersTool, + clerkRevokeSessionTool, + clerkUpdateUserTool, +} from '@/tools/clerk' +import { + confluenceAddLabelTool, + confluenceCreateBlogPostTool, + confluenceCreateCommentTool, + confluenceCreatePagePropertyTool, + confluenceCreatePageTool, + confluenceDeleteAttachmentTool, + confluenceDeleteCommentTool, + confluenceDeletePageTool, + confluenceGetBlogPostTool, + confluenceGetPageAncestorsTool, + confluenceGetPageChildrenTool, + confluenceGetPageVersionTool, + confluenceGetSpaceTool, + confluenceListAttachmentsTool, + confluenceListBlogPostsInSpaceTool, + confluenceListBlogPostsTool, + confluenceListCommentsTool, + confluenceListLabelsTool, + confluenceListPagePropertiesTool, + confluenceListPagesInSpaceTool, + confluenceListPageVersionsTool, + confluenceListSpacesTool, + confluenceRetrieveTool, + confluenceSearchInSpaceTool, + confluenceSearchTool, + confluenceUpdateCommentTool, + confluenceUpdateTool, + confluenceUploadAttachmentTool, +} from '@/tools/confluence' +import { + cursorAddFollowupTool, + cursorAddFollowupV2Tool, + cursorDeleteAgentTool, + cursorDeleteAgentV2Tool, + cursorGetAgentTool, + cursorGetAgentV2Tool, + cursorGetConversationTool, + cursorGetConversationV2Tool, + cursorLaunchAgentTool, + cursorLaunchAgentV2Tool, + cursorListAgentsTool, + cursorListAgentsV2Tool, + cursorStopAgentTool, + cursorStopAgentV2Tool, +} from '@/tools/cursor' +import { + datadogCancelDowntimeTool, + datadogCreateDowntimeTool, + datadogCreateEventTool, + datadogCreateMonitorTool, + datadogGetMonitorTool, + datadogListDowntimesTool, + datadogListMonitorsTool, + datadogMuteMonitorTool, + datadogQueryLogsTool, + datadogQueryTimeseriesTool, + datadogSendLogsTool, + datadogSubmitMetricsTool, +} from '@/tools/datadog' +import { + discordAddReactionTool, + discordArchiveThreadTool, + discordAssignRoleTool, + discordBanMemberTool, + discordCreateChannelTool, + discordCreateInviteTool, + discordCreateRoleTool, + discordCreateThreadTool, + discordCreateWebhookTool, + discordDeleteChannelTool, + discordDeleteInviteTool, + discordDeleteMessageTool, + discordDeleteRoleTool, + discordDeleteWebhookTool, + discordEditMessageTool, + discordExecuteWebhookTool, + discordGetChannelTool, + discordGetInviteTool, + discordGetMemberTool, + discordGetMessagesTool, + discordGetServerTool, + discordGetUserTool, + discordGetWebhookTool, + discordJoinThreadTool, + discordKickMemberTool, + discordLeaveThreadTool, + discordPinMessageTool, + discordRemoveReactionTool, + discordRemoveRoleTool, + discordSendMessageTool, + discordUnbanMemberTool, + discordUnpinMessageTool, + discordUpdateChannelTool, + discordUpdateMemberTool, + discordUpdateRoleTool, +} from '@/tools/discord' +import { + dropboxCopyTool, + dropboxCreateFolderTool, + dropboxCreateSharedLinkTool, 
+ dropboxDeleteTool, + dropboxDownloadTool, + dropboxGetMetadataTool, + dropboxListFolderTool, + dropboxMoveTool, + dropboxSearchTool, + dropboxUploadTool, +} from '@/tools/dropbox' +import { chainOfThoughtTool, predictTool, reactTool } from '@/tools/dspy' +import { duckduckgoSearchTool } from '@/tools/duckduckgo' +import { + dynamodbDeleteTool, + dynamodbGetTool, + dynamodbIntrospectTool, + dynamodbPutTool, + dynamodbQueryTool, + dynamodbScanTool, + dynamodbUpdateTool, +} from '@/tools/dynamodb' +import { + elasticsearchBulkTool, + elasticsearchClusterHealthTool, + elasticsearchClusterStatsTool, + elasticsearchCountTool, + elasticsearchCreateIndexTool, + elasticsearchDeleteDocumentTool, + elasticsearchDeleteIndexTool, + elasticsearchGetDocumentTool, + elasticsearchGetIndexTool, + elasticsearchIndexDocumentTool, + elasticsearchListIndicesTool, + elasticsearchSearchTool, + elasticsearchUpdateDocumentTool, +} from '@/tools/elasticsearch' +import { elevenLabsTtsTool } from '@/tools/elevenlabs' +import { + enrichCheckCreditsTool, + enrichCompanyFundingTool, + enrichCompanyLookupTool, + enrichCompanyRevenueTool, + enrichDisposableEmailCheckTool, + enrichEmailToIpTool, + enrichEmailToPersonLiteTool, + enrichEmailToPhoneTool, + enrichEmailToProfileTool, + enrichFindEmailTool, + enrichGetPostDetailsTool, + enrichIpToCompanyTool, + enrichLinkedInProfileTool, + enrichLinkedInToPersonalEmailTool, + enrichLinkedInToWorkEmailTool, + enrichPhoneFinderTool, + enrichReverseHashLookupTool, + enrichSalesPointerPeopleTool, + enrichSearchCompanyActivitiesTool, + enrichSearchCompanyEmployeesTool, + enrichSearchCompanyTool, + enrichSearchLogoTool, + enrichSearchPeopleActivitiesTool, + enrichSearchPeopleTool, + enrichSearchPostCommentsTool, + enrichSearchPostReactionsTool, + enrichSearchPostsTool, + enrichSearchSimilarCompaniesTool, + enrichVerifyEmailTool, +} from '@/tools/enrich' +import { + exaAnswerTool, + exaFindSimilarLinksTool, + exaGetContentsTool, + exaResearchTool, + exaSearchTool, +} from '@/tools/exa' +import { fileParserV2Tool, fileParserV3Tool, fileParseTool } from '@/tools/file' +import { + firecrawlAgentTool, + firecrawlCrawlTool, + firecrawlExtractTool, + firecrawlMapTool, + firecrawlScrapeTool, + firecrawlSearchTool, +} from '@/tools/firecrawl' +import { + firefliesAddToLiveMeetingTool, + firefliesCreateBiteTool, + firefliesDeleteTranscriptTool, + firefliesGetTranscriptTool, + firefliesGetUserTool, + firefliesListBitesTool, + firefliesListContactsTool, + firefliesListTranscriptsTool, + firefliesListUsersTool, + firefliesUploadAudioTool, +} from '@/tools/fireflies' +import { functionExecuteTool } from '@/tools/function' +import { + githubAddAssigneesTool, + githubAddAssigneesV2Tool, + githubAddLabelsTool, + githubAddLabelsV2Tool, + githubCancelWorkflowRunTool, + githubCancelWorkflowRunV2Tool, + githubCheckStarTool, + githubCheckStarV2Tool, + githubCloseIssueTool, + githubCloseIssueV2Tool, + githubClosePRTool, + githubClosePRV2Tool, + githubCommentTool, + githubCommentV2Tool, + githubCompareCommitsTool, + githubCompareCommitsV2Tool, + githubCreateBranchTool, + githubCreateBranchV2Tool, + githubCreateCommentReactionTool, + githubCreateCommentReactionV2Tool, + githubCreateFileTool, + githubCreateFileV2Tool, + githubCreateGistTool, + githubCreateGistV2Tool, + githubCreateIssueReactionTool, + githubCreateIssueReactionV2Tool, + githubCreateIssueTool, + githubCreateIssueV2Tool, + githubCreateMilestoneTool, + githubCreateMilestoneV2Tool, + githubCreatePRTool, + githubCreatePRV2Tool, + 
githubCreateProjectTool, + githubCreateProjectV2Tool, + githubCreateReleaseTool, + githubCreateReleaseV2Tool, + githubDeleteBranchTool, + githubDeleteBranchV2Tool, + githubDeleteCommentReactionTool, + githubDeleteCommentReactionV2Tool, + githubDeleteCommentTool, + githubDeleteCommentV2Tool, + githubDeleteFileTool, + githubDeleteFileV2Tool, + githubDeleteGistTool, + githubDeleteGistV2Tool, + githubDeleteIssueReactionTool, + githubDeleteIssueReactionV2Tool, + githubDeleteMilestoneTool, + githubDeleteMilestoneV2Tool, + githubDeleteProjectTool, + githubDeleteProjectV2Tool, + githubDeleteReleaseTool, + githubDeleteReleaseV2Tool, + githubForkGistTool, + githubForkGistV2Tool, + githubForkRepoTool, + githubForkRepoV2Tool, + githubGetBranchProtectionTool, + githubGetBranchProtectionV2Tool, + githubGetBranchTool, + githubGetBranchV2Tool, + githubGetCommitTool, + githubGetCommitV2Tool, + githubGetFileContentTool, + githubGetFileContentV2Tool, + githubGetGistTool, + githubGetGistV2Tool, + githubGetIssueTool, + githubGetIssueV2Tool, + githubGetMilestoneTool, + githubGetMilestoneV2Tool, + githubGetPRFilesTool, + githubGetPRFilesV2Tool, + githubGetProjectTool, + githubGetProjectV2Tool, + githubGetReleaseTool, + githubGetReleaseV2Tool, + githubGetTreeTool, + githubGetTreeV2Tool, + githubGetWorkflowRunTool, + githubGetWorkflowRunV2Tool, + githubGetWorkflowTool, + githubGetWorkflowV2Tool, + githubIssueCommentTool, + githubIssueCommentV2Tool, + githubLatestCommitTool, + githubLatestCommitV2Tool, + githubListBranchesTool, + githubListBranchesV2Tool, + githubListCommitsTool, + githubListCommitsV2Tool, + githubListForksTool, + githubListForksV2Tool, + githubListGistsTool, + githubListGistsV2Tool, + githubListIssueCommentsTool, + githubListIssueCommentsV2Tool, + githubListIssuesTool, + githubListIssuesV2Tool, + githubListMilestonesTool, + githubListMilestonesV2Tool, + githubListPRCommentsTool, + githubListPRCommentsV2Tool, + githubListPRsTool, + githubListPRsV2Tool, + githubListProjectsTool, + githubListProjectsV2Tool, + githubListReleasesTool, + githubListReleasesV2Tool, + githubListStargazersTool, + githubListStargazersV2Tool, + githubListWorkflowRunsTool, + githubListWorkflowRunsV2Tool, + githubListWorkflowsTool, + githubListWorkflowsV2Tool, + githubMergePRTool, + githubMergePRV2Tool, + githubPrTool, + githubPrV2Tool, + githubRemoveLabelTool, + githubRemoveLabelV2Tool, + githubRepoInfoTool, + githubRepoInfoV2Tool, + githubRequestReviewersTool, + githubRequestReviewersV2Tool, + githubRerunWorkflowTool, + githubRerunWorkflowV2Tool, + githubSearchCodeTool, + githubSearchCodeV2Tool, + githubSearchCommitsTool, + githubSearchCommitsV2Tool, + githubSearchIssuesTool, + githubSearchIssuesV2Tool, + githubSearchReposTool, + githubSearchReposV2Tool, + githubSearchUsersTool, + githubSearchUsersV2Tool, + githubStarGistTool, + githubStarGistV2Tool, + githubStarRepoTool, + githubStarRepoV2Tool, + githubTriggerWorkflowTool, + githubTriggerWorkflowV2Tool, + githubUnstarGistTool, + githubUnstarGistV2Tool, + githubUnstarRepoTool, + githubUnstarRepoV2Tool, + githubUpdateBranchProtectionTool, + githubUpdateBranchProtectionV2Tool, + githubUpdateCommentTool, + githubUpdateCommentV2Tool, + githubUpdateFileTool, + githubUpdateFileV2Tool, + githubUpdateGistTool, + githubUpdateGistV2Tool, + githubUpdateIssueTool, + githubUpdateIssueV2Tool, + githubUpdateMilestoneTool, + githubUpdateMilestoneV2Tool, + githubUpdatePRTool, + githubUpdatePRV2Tool, + githubUpdateProjectTool, + githubUpdateProjectV2Tool, + githubUpdateReleaseTool, + 
githubUpdateReleaseV2Tool, +} from '@/tools/github' +import { + gitlabCancelPipelineTool, + gitlabCreateIssueNoteTool, + gitlabCreateIssueTool, + gitlabCreateMergeRequestNoteTool, + gitlabCreateMergeRequestTool, + gitlabCreatePipelineTool, + gitlabDeleteIssueTool, + gitlabGetIssueTool, + gitlabGetMergeRequestTool, + gitlabGetPipelineTool, + gitlabGetProjectTool, + gitlabListIssuesTool, + gitlabListMergeRequestsTool, + gitlabListPipelinesTool, + gitlabListProjectsTool, + gitlabMergeMergeRequestTool, + gitlabRetryPipelineTool, + gitlabUpdateIssueTool, + gitlabUpdateMergeRequestTool, +} from '@/tools/gitlab' +import { + gmailAddLabelTool, + gmailAddLabelV2Tool, + gmailArchiveTool, + gmailArchiveV2Tool, + gmailDeleteTool, + gmailDeleteV2Tool, + gmailDraftTool, + gmailDraftV2Tool, + gmailMarkReadTool, + gmailMarkReadV2Tool, + gmailMarkUnreadTool, + gmailMarkUnreadV2Tool, + gmailMoveTool, + gmailMoveV2Tool, + gmailReadTool, + gmailReadV2Tool, + gmailRemoveLabelTool, + gmailRemoveLabelV2Tool, + gmailSearchTool, + gmailSearchV2Tool, + gmailSendTool, + gmailSendV2Tool, + gmailUnarchiveTool, + gmailUnarchiveV2Tool, +} from '@/tools/gmail' +import { googleSearchTool } from '@/tools/google' +import { + googleCalendarCreateTool, + googleCalendarCreateV2Tool, + googleCalendarDeleteTool, + googleCalendarDeleteV2Tool, + googleCalendarGetTool, + googleCalendarGetV2Tool, + googleCalendarInstancesTool, + googleCalendarInstancesV2Tool, + googleCalendarInviteTool, + googleCalendarInviteV2Tool, + googleCalendarListCalendarsTool, + googleCalendarListCalendarsV2Tool, + googleCalendarListTool, + googleCalendarListV2Tool, + googleCalendarMoveTool, + googleCalendarMoveV2Tool, + googleCalendarQuickAddTool, + googleCalendarQuickAddV2Tool, + googleCalendarUpdateTool, + googleCalendarUpdateV2Tool, +} from '@/tools/google_calendar' +import { googleDocsCreateTool, googleDocsReadTool, googleDocsWriteTool } from '@/tools/google_docs' +import { + googleDriveCopyTool, + googleDriveCreateFolderTool, + googleDriveDeleteTool, + googleDriveDownloadTool, + googleDriveGetAboutTool, + googleDriveGetContentTool, + googleDriveGetFileTool, + googleDriveListPermissionsTool, + googleDriveListTool, + googleDriveShareTool, + googleDriveTrashTool, + googleDriveUnshareTool, + googleDriveUntrashTool, + googleDriveUpdateTool, + googleDriveUploadTool, +} from '@/tools/google_drive' +import { + googleFormsBatchUpdateTool, + googleFormsCreateFormTool, + googleFormsCreateWatchTool, + googleFormsDeleteWatchTool, + googleFormsGetFormTool, + googleFormsGetResponsesTool, + googleFormsListWatchesTool, + googleFormsRenewWatchTool, + googleFormsSetPublishSettingsTool, +} from '@/tools/google_forms' +import { + googleGroupsAddAliasTool, + googleGroupsAddMemberTool, + googleGroupsCreateGroupTool, + googleGroupsDeleteGroupTool, + googleGroupsGetGroupTool, + googleGroupsGetMemberTool, + googleGroupsGetSettingsTool, + googleGroupsHasMemberTool, + googleGroupsListAliasesTool, + googleGroupsListGroupsTool, + googleGroupsListMembersTool, + googleGroupsRemoveAliasTool, + googleGroupsRemoveMemberTool, + googleGroupsUpdateGroupTool, + googleGroupsUpdateMemberTool, + googleGroupsUpdateSettingsTool, +} from '@/tools/google_groups' +import { + googleMapsAirQualityTool, + googleMapsDirectionsTool, + googleMapsDistanceMatrixTool, + googleMapsElevationTool, + googleMapsGeocodeTool, + googleMapsGeolocateTool, + googleMapsPlaceDetailsTool, + googleMapsPlacesSearchTool, + googleMapsReverseGeocodeTool, + googleMapsSnapToRoadsTool, + googleMapsSpeedLimitsTool, + 
googleMapsTimezoneTool, + googleMapsValidateAddressTool, +} from '@/tools/google_maps' +import { + googleSheetsAppendTool, + googleSheetsAppendV2Tool, + googleSheetsBatchClearV2Tool, + googleSheetsBatchGetV2Tool, + googleSheetsBatchUpdateV2Tool, + googleSheetsClearV2Tool, + googleSheetsCopySheetV2Tool, + googleSheetsCreateSpreadsheetV2Tool, + googleSheetsGetSpreadsheetV2Tool, + googleSheetsReadTool, + googleSheetsReadV2Tool, + googleSheetsUpdateTool, + googleSheetsUpdateV2Tool, + googleSheetsWriteTool, + googleSheetsWriteV2Tool, +} from '@/tools/google_sheets' +import { + googleSlidesAddImageTool, + googleSlidesAddSlideTool, + googleSlidesCreateShapeTool, + googleSlidesCreateTableTool, + googleSlidesCreateTool, + googleSlidesDeleteObjectTool, + googleSlidesDuplicateObjectTool, + googleSlidesGetPageTool, + googleSlidesGetThumbnailTool, + googleSlidesInsertTextTool, + googleSlidesReadTool, + googleSlidesReplaceAllTextTool, + googleSlidesUpdateSlidesPositionTool, + googleSlidesWriteTool, +} from '@/tools/google_slides' +import { + createMattersExportTool, + createMattersHoldsTool, + createMattersTool, + downloadExportFileTool, + listMattersExportTool, + listMattersHoldsTool, + listMattersTool, +} from '@/tools/google_vault' +import { + grafanaCreateAlertRuleTool, + grafanaCreateAnnotationTool, + grafanaCreateDashboardTool, + grafanaCreateFolderTool, + grafanaDeleteAlertRuleTool, + grafanaDeleteAnnotationTool, + grafanaDeleteDashboardTool, + grafanaGetAlertRuleTool, + grafanaGetDashboardTool, + grafanaGetDataSourceTool, + grafanaListAlertRulesTool, + grafanaListAnnotationsTool, + grafanaListContactPointsTool, + grafanaListDashboardsTool, + grafanaListDataSourcesTool, + grafanaListFoldersTool, + grafanaUpdateAlertRuleTool, + grafanaUpdateAnnotationTool, + grafanaUpdateDashboardTool, +} from '@/tools/grafana' +import { + grainCreateHookTool, + grainDeleteHookTool, + grainGetRecordingTool, + grainGetTranscriptTool, + grainListHooksTool, + grainListMeetingTypesTool, + grainListRecordingsTool, + grainListTeamsTool, +} from '@/tools/grain' +import { + greptileIndexRepoTool, + greptileQueryTool, + greptileSearchTool, + greptileStatusTool, +} from '@/tools/greptile' +import { guardrailsValidateTool } from '@/tools/guardrails' +import { httpRequestTool, webhookRequestTool } from '@/tools/http' +import { + hubspotCreateCompanyTool, + hubspotCreateContactTool, + hubspotGetCompanyTool, + hubspotGetContactTool, + hubspotGetUsersTool, + hubspotListCompaniesTool, + hubspotListContactsTool, + hubspotListDealsTool, + hubspotSearchCompaniesTool, + hubspotSearchContactsTool, + hubspotUpdateCompanyTool, + hubspotUpdateContactTool, +} from '@/tools/hubspot' +import { huggingfaceChatTool } from '@/tools/huggingface' +import { + hunterCompaniesFindTool, + hunterDiscoverTool, + hunterDomainSearchTool, + hunterEmailCountTool, + hunterEmailFinderTool, + hunterEmailVerifierTool, +} from '@/tools/hunter' +import { + incidentioActionsListTool, + incidentioActionsShowTool, + incidentioCustomFieldsCreateTool, + incidentioCustomFieldsDeleteTool, + incidentioCustomFieldsListTool, + incidentioCustomFieldsShowTool, + incidentioCustomFieldsUpdateTool, + incidentioEscalationPathsCreateTool, + incidentioEscalationPathsDeleteTool, + incidentioEscalationPathsShowTool, + incidentioEscalationPathsUpdateTool, + incidentioEscalationsCreateTool, + incidentioEscalationsListTool, + incidentioEscalationsShowTool, + incidentioFollowUpsListTool, + incidentioFollowUpsShowTool, + incidentioIncidentRolesCreateTool, + 
incidentioIncidentRolesDeleteTool, + incidentioIncidentRolesListTool, + incidentioIncidentRolesShowTool, + incidentioIncidentRolesUpdateTool, + incidentioIncidentStatusesListTool, + incidentioIncidentsCreateTool, + incidentioIncidentsListTool, + incidentioIncidentsShowTool, + incidentioIncidentsUpdateTool, + incidentioIncidentTimestampsListTool, + incidentioIncidentTimestampsShowTool, + incidentioIncidentTypesListTool, + incidentioIncidentUpdatesListTool, + incidentioScheduleEntriesListTool, + incidentioScheduleOverridesCreateTool, + incidentioSchedulesCreateTool, + incidentioSchedulesDeleteTool, + incidentioSchedulesListTool, + incidentioSchedulesShowTool, + incidentioSchedulesUpdateTool, + incidentioSeveritiesListTool, + incidentioUsersListTool, + incidentioUsersShowTool, + incidentioWorkflowsCreateTool, + incidentioWorkflowsDeleteTool, + incidentioWorkflowsListTool, + incidentioWorkflowsShowTool, + incidentioWorkflowsUpdateTool, +} from '@/tools/incidentio' +import { + intercomAssignConversationV2Tool, + intercomAttachContactToCompanyV2Tool, + intercomCloseConversationV2Tool, + intercomCreateCompanyTool, + intercomCreateCompanyV2Tool, + intercomCreateContactTool, + intercomCreateContactV2Tool, + intercomCreateEventV2Tool, + intercomCreateMessageTool, + intercomCreateMessageV2Tool, + intercomCreateNoteV2Tool, + intercomCreateTagV2Tool, + intercomCreateTicketTool, + intercomCreateTicketV2Tool, + intercomDeleteContactTool, + intercomDeleteContactV2Tool, + intercomDetachContactFromCompanyV2Tool, + intercomGetCompanyTool, + intercomGetCompanyV2Tool, + intercomGetContactTool, + intercomGetContactV2Tool, + intercomGetConversationTool, + intercomGetConversationV2Tool, + intercomGetTicketTool, + intercomGetTicketV2Tool, + intercomListAdminsV2Tool, + intercomListCompaniesTool, + intercomListCompaniesV2Tool, + intercomListContactsTool, + intercomListContactsV2Tool, + intercomListConversationsTool, + intercomListConversationsV2Tool, + intercomListTagsV2Tool, + intercomOpenConversationV2Tool, + intercomReplyConversationTool, + intercomReplyConversationV2Tool, + intercomSearchContactsTool, + intercomSearchContactsV2Tool, + intercomSearchConversationsTool, + intercomSearchConversationsV2Tool, + intercomSnoozeConversationV2Tool, + intercomTagContactV2Tool, + intercomTagConversationV2Tool, + intercomUntagContactV2Tool, + intercomUpdateContactTool, + intercomUpdateContactV2Tool, + intercomUpdateTicketV2Tool, +} from '@/tools/intercom' +import { jinaReadUrlTool, jinaSearchTool } from '@/tools/jina' +import { + jiraAddAttachmentTool, + jiraAddCommentTool, + jiraAddWatcherTool, + jiraAddWorklogTool, + jiraAssignIssueTool, + jiraBulkRetrieveTool, + jiraCreateIssueLinkTool, + jiraDeleteAttachmentTool, + jiraDeleteCommentTool, + jiraDeleteIssueLinkTool, + jiraDeleteIssueTool, + jiraDeleteWorklogTool, + jiraGetAttachmentsTool, + jiraGetCommentsTool, + jiraGetUsersTool, + jiraGetWorklogsTool, + jiraRemoveWatcherTool, + jiraRetrieveTool, + jiraSearchIssuesTool, + jiraTransitionIssueTool, + jiraUpdateCommentTool, + jiraUpdateTool, + jiraUpdateWorklogTool, + jiraWriteTool, +} from '@/tools/jira' +import { + jsmAddCommentTool, + jsmAddCustomerTool, + jsmAddOrganizationTool, + jsmAddParticipantsTool, + jsmAnswerApprovalTool, + jsmCreateOrganizationTool, + jsmCreateRequestTool, + jsmGetApprovalsTool, + jsmGetCommentsTool, + jsmGetCustomersTool, + jsmGetOrganizationsTool, + jsmGetParticipantsTool, + jsmGetQueuesTool, + jsmGetRequestsTool, + jsmGetRequestTool, + jsmGetRequestTypesTool, + jsmGetServiceDesksTool, + 
jsmGetSlaTool, + jsmGetTransitionsTool, + jsmTransitionRequestTool, +} from '@/tools/jsm' +import { + kalshiAmendOrderTool, + kalshiAmendOrderV2Tool, + kalshiCancelOrderTool, + kalshiCancelOrderV2Tool, + kalshiCreateOrderTool, + kalshiCreateOrderV2Tool, + kalshiGetBalanceTool, + kalshiGetBalanceV2Tool, + kalshiGetCandlesticksTool, + kalshiGetCandlesticksV2Tool, + kalshiGetEventsTool, + kalshiGetEventsV2Tool, + kalshiGetEventTool, + kalshiGetEventV2Tool, + kalshiGetExchangeStatusTool, + kalshiGetExchangeStatusV2Tool, + kalshiGetFillsTool, + kalshiGetFillsV2Tool, + kalshiGetMarketsTool, + kalshiGetMarketsV2Tool, + kalshiGetMarketTool, + kalshiGetMarketV2Tool, + kalshiGetOrderbookTool, + kalshiGetOrderbookV2Tool, + kalshiGetOrdersTool, + kalshiGetOrdersV2Tool, + kalshiGetOrderTool, + kalshiGetOrderV2Tool, + kalshiGetPositionsTool, + kalshiGetPositionsV2Tool, + kalshiGetSeriesByTickerTool, + kalshiGetSeriesByTickerV2Tool, + kalshiGetTradesTool, + kalshiGetTradesV2Tool, +} from '@/tools/kalshi' +import { + knowledgeCreateDocumentTool, + knowledgeSearchTool, + knowledgeUploadChunkTool, +} from '@/tools/knowledge' +import { langsmithCreateRunsBatchTool, langsmithCreateRunTool } from '@/tools/langsmith' +import { lemlistGetActivitiesTool, lemlistGetLeadTool, lemlistSendEmailTool } from '@/tools/lemlist' +import { + linearAddLabelToIssueTool, + linearAddLabelToProjectTool, + linearArchiveIssueTool, + linearArchiveLabelTool, + linearArchiveProjectTool, + linearCreateAttachmentTool, + linearCreateCommentTool, + linearCreateCustomerRequestTool, + linearCreateCustomerStatusTool, + linearCreateCustomerTierTool, + linearCreateCustomerTool, + linearCreateCycleTool, + linearCreateFavoriteTool, + linearCreateIssueRelationTool, + linearCreateIssueTool, + linearCreateLabelTool, + linearCreateProjectLabelTool, + linearCreateProjectMilestoneTool, + linearCreateProjectStatusTool, + linearCreateProjectTool, + linearCreateProjectUpdateTool, + linearCreateWorkflowStateTool, + linearDeleteAttachmentTool, + linearDeleteCommentTool, + linearDeleteCustomerStatusTool, + linearDeleteCustomerTierTool, + linearDeleteCustomerTool, + linearDeleteIssueRelationTool, + linearDeleteIssueTool, + linearDeleteProjectLabelTool, + linearDeleteProjectMilestoneTool, + linearDeleteProjectStatusTool, + linearDeleteProjectTool, + linearGetActiveCycleTool, + linearGetCustomerTool, + linearGetCycleTool, + linearGetIssueTool, + linearGetProjectTool, + linearGetViewerTool, + linearListAttachmentsTool, + linearListCommentsTool, + linearListCustomerRequestsTool, + linearListCustomerStatusesTool, + linearListCustomersTool, + linearListCustomerTiersTool, + linearListCyclesTool, + linearListFavoritesTool, + linearListIssueRelationsTool, + linearListLabelsTool, + linearListNotificationsTool, + linearListProjectLabelsTool, + linearListProjectMilestonesTool, + linearListProjectStatusesTool, + linearListProjectsTool, + linearListProjectUpdatesTool, + linearListTeamsTool, + linearListUsersTool, + linearListWorkflowStatesTool, + linearMergeCustomersTool, + linearReadIssuesTool, + linearRemoveLabelFromIssueTool, + linearRemoveLabelFromProjectTool, + linearSearchIssuesTool, + linearUnarchiveIssueTool, + linearUpdateAttachmentTool, + linearUpdateCommentTool, + linearUpdateCustomerRequestTool, + linearUpdateCustomerStatusTool, + linearUpdateCustomerTierTool, + linearUpdateCustomerTool, + linearUpdateIssueTool, + linearUpdateLabelTool, + linearUpdateNotificationTool, + linearUpdateProjectLabelTool, + linearUpdateProjectMilestoneTool, + 
linearUpdateProjectStatusTool, + linearUpdateProjectTool, + linearUpdateWorkflowStateTool, +} from '@/tools/linear' +import { linkedInGetProfileTool, linkedInSharePostTool } from '@/tools/linkedin' +import { linkupSearchTool } from '@/tools/linkup' +import { llmChatTool } from '@/tools/llm' +import { + mailchimpAddMemberTagsTool, + mailchimpAddMemberTool, + mailchimpAddOrUpdateMemberTool, + mailchimpAddSegmentMemberTool, + mailchimpAddSubscriberToAutomationTool, + mailchimpArchiveMemberTool, + mailchimpCreateAudienceTool, + mailchimpCreateBatchOperationTool, + mailchimpCreateCampaignTool, + mailchimpCreateInterestCategoryTool, + mailchimpCreateInterestTool, + mailchimpCreateLandingPageTool, + mailchimpCreateMergeFieldTool, + mailchimpCreateSegmentTool, + mailchimpCreateTemplateTool, + mailchimpDeleteAudienceTool, + mailchimpDeleteBatchOperationTool, + mailchimpDeleteCampaignTool, + mailchimpDeleteInterestCategoryTool, + mailchimpDeleteInterestTool, + mailchimpDeleteLandingPageTool, + mailchimpDeleteMemberTool, + mailchimpDeleteMergeFieldTool, + mailchimpDeleteSegmentTool, + mailchimpDeleteTemplateTool, + mailchimpGetAudiencesTool, + mailchimpGetAudienceTool, + mailchimpGetAutomationsTool, + mailchimpGetAutomationTool, + mailchimpGetBatchOperationsTool, + mailchimpGetBatchOperationTool, + mailchimpGetCampaignContentTool, + mailchimpGetCampaignReportsTool, + mailchimpGetCampaignReportTool, + mailchimpGetCampaignsTool, + mailchimpGetCampaignTool, + mailchimpGetInterestCategoriesTool, + mailchimpGetInterestCategoryTool, + mailchimpGetInterestsTool, + mailchimpGetInterestTool, + mailchimpGetLandingPagesTool, + mailchimpGetLandingPageTool, + mailchimpGetMembersTool, + mailchimpGetMemberTagsTool, + mailchimpGetMemberTool, + mailchimpGetMergeFieldsTool, + mailchimpGetMergeFieldTool, + mailchimpGetSegmentMembersTool, + mailchimpGetSegmentsTool, + mailchimpGetSegmentTool, + mailchimpGetTemplatesTool, + mailchimpGetTemplateTool, + mailchimpPauseAutomationTool, + mailchimpPublishLandingPageTool, + mailchimpRemoveMemberTagsTool, + mailchimpRemoveSegmentMemberTool, + mailchimpReplicateCampaignTool, + mailchimpScheduleCampaignTool, + mailchimpSendCampaignTool, + mailchimpSetCampaignContentTool, + mailchimpStartAutomationTool, + mailchimpUnarchiveMemberTool, + mailchimpUnpublishLandingPageTool, + mailchimpUnscheduleCampaignTool, + mailchimpUpdateAudienceTool, + mailchimpUpdateCampaignTool, + mailchimpUpdateInterestCategoryTool, + mailchimpUpdateInterestTool, + mailchimpUpdateLandingPageTool, + mailchimpUpdateMemberTool, + mailchimpUpdateMergeFieldTool, + mailchimpUpdateSegmentTool, + mailchimpUpdateTemplateTool, +} from '@/tools/mailchimp' +import { + mailgunAddListMemberTool, + mailgunCreateMailingListTool, + mailgunGetDomainTool, + mailgunGetMailingListTool, + mailgunGetMessageTool, + mailgunListDomainsTool, + mailgunListMessagesTool, + mailgunSendMessageTool, +} from '@/tools/mailgun' +import { mem0AddMemoriesTool, mem0GetMemoriesTool, mem0SearchMemoriesTool } from '@/tools/mem0' +import { memoryAddTool, memoryDeleteTool, memoryGetAllTool, memoryGetTool } from '@/tools/memory' +import { + microsoftExcelReadTool, + microsoftExcelReadV2Tool, + microsoftExcelTableAddTool, + microsoftExcelWorksheetAddTool, + microsoftExcelWriteTool, + microsoftExcelWriteV2Tool, +} from '@/tools/microsoft_excel' +import { + microsoftPlannerCreateBucketTool, + microsoftPlannerCreateTaskTool, + microsoftPlannerDeleteBucketTool, + microsoftPlannerDeleteTaskTool, + microsoftPlannerGetTaskDetailsTool, + 
microsoftPlannerListBucketsTool, + microsoftPlannerListPlansTool, + microsoftPlannerReadBucketTool, + microsoftPlannerReadPlanTool, + microsoftPlannerReadTaskTool, + microsoftPlannerUpdateBucketTool, + microsoftPlannerUpdateTaskDetailsTool, + microsoftPlannerUpdateTaskTool, +} from '@/tools/microsoft_planner' +import { + microsoftTeamsDeleteChannelMessageTool, + microsoftTeamsDeleteChatMessageTool, + microsoftTeamsGetMessageTool, + microsoftTeamsListChannelMembersTool, + microsoftTeamsListTeamMembersTool, + microsoftTeamsReadChannelTool, + microsoftTeamsReadChatTool, + microsoftTeamsReplyToMessageTool, + microsoftTeamsSetReactionTool, + microsoftTeamsUnsetReactionTool, + microsoftTeamsUpdateChannelMessageTool, + microsoftTeamsUpdateChatMessageTool, + microsoftTeamsWriteChannelTool, + microsoftTeamsWriteChatTool, +} from '@/tools/microsoft_teams' +import { mistralParserTool, mistralParserV2Tool, mistralParserV3Tool } from '@/tools/mistral' +import { + mongodbDeleteTool, + mongodbExecuteTool, + mongodbInsertTool, + mongodbIntrospectTool, + mongodbQueryTool, + mongodbUpdateTool, +} from '@/tools/mongodb' +import { + mysqlDeleteTool, + mysqlExecuteTool, + mysqlInsertTool, + mysqlIntrospectTool, + mysqlQueryTool, + mysqlUpdateTool, +} from '@/tools/mysql' +import { + neo4jCreateTool, + neo4jDeleteTool, + neo4jExecuteTool, + neo4jIntrospectTool, + neo4jMergeTool, + neo4jQueryTool, + neo4jUpdateTool, +} from '@/tools/neo4j' +import { + notionAddDatabaseRowTool, + notionCreateDatabaseTool, + notionCreateDatabaseV2Tool, + notionCreatePageTool, + notionCreatePageV2Tool, + notionQueryDatabaseTool, + notionQueryDatabaseV2Tool, + notionReadDatabaseTool, + notionReadDatabaseV2Tool, + notionReadTool, + notionReadV2Tool, + notionSearchTool, + notionSearchV2Tool, + notionUpdatePageV2Tool, + notionWriteTool, + notionWriteV2Tool, +} from '@/tools/notion' +import { + onedriveCreateFolderTool, + onedriveDeleteTool, + onedriveDownloadTool, + onedriveListTool, + onedriveUploadTool, +} from '@/tools/onedrive' +import { openAIEmbeddingsTool, openAIImageTool } from '@/tools/openai' +import { + outlookCopyTool, + outlookDeleteTool, + outlookDraftTool, + outlookForwardTool, + outlookMarkReadTool, + outlookMarkUnreadTool, + outlookMoveTool, + outlookReadTool, + outlookSendTool, +} from '@/tools/outlook' +import { parallelDeepResearchTool, parallelExtractTool, parallelSearchTool } from '@/tools/parallel' +import { perplexityChatTool, perplexitySearchTool } from '@/tools/perplexity' +import { + pineconeFetchTool, + pineconeGenerateEmbeddingsTool, + pineconeSearchTextTool, + pineconeSearchVectorTool, + pineconeUpsertTextTool, +} from '@/tools/pinecone' +import { + pipedriveCreateActivityTool, + pipedriveCreateDealTool, + pipedriveCreateLeadTool, + pipedriveCreateProjectTool, + pipedriveDeleteLeadTool, + pipedriveGetActivitiesTool, + pipedriveGetAllDealsTool, + pipedriveGetDealTool, + pipedriveGetFilesTool, + pipedriveGetLeadsTool, + pipedriveGetMailMessagesTool, + pipedriveGetMailThreadTool, + pipedriveGetPipelineDealsTool, + pipedriveGetPipelinesTool, + pipedriveGetProjectsTool, + pipedriveUpdateActivityTool, + pipedriveUpdateDealTool, + pipedriveUpdateLeadTool, +} from '@/tools/pipedrive' +import { + polymarketGetActivityTool, + polymarketGetEventsTool, + polymarketGetEventTool, + polymarketGetHoldersTool, + polymarketGetLastTradePriceTool, + polymarketGetLeaderboardTool, + polymarketGetMarketsTool, + polymarketGetMarketTool, + polymarketGetMidpointTool, + polymarketGetOrderbookTool, + polymarketGetPositionsTool, + 
polymarketGetPriceHistoryTool, + polymarketGetPriceTool, + polymarketGetSeriesByIdTool, + polymarketGetSeriesTool, + polymarketGetSpreadTool, + polymarketGetTagsTool, + polymarketGetTickSizeTool, + polymarketGetTradesTool, + polymarketSearchTool, +} from '@/tools/polymarket' +import { + postgresDeleteTool, + postgresExecuteTool, + postgresInsertTool, + postgresIntrospectTool, + postgresQueryTool, + postgresUpdateTool, +} from '@/tools/postgresql' +import { + posthogBatchEventsTool, + posthogCaptureEventTool, + posthogCreateAnnotationTool, + posthogCreateCohortTool, + posthogCreateExperimentTool, + posthogCreateFeatureFlagTool, + posthogCreateInsightTool, + posthogCreateSurveyTool, + posthogDeleteFeatureFlagTool, + posthogDeletePersonTool, + posthogEvaluateFlagsTool, + posthogGetCohortTool, + posthogGetDashboardTool, + posthogGetEventDefinitionTool, + posthogGetExperimentTool, + posthogGetFeatureFlagTool, + posthogGetInsightTool, + posthogGetOrganizationTool, + posthogGetPersonTool, + posthogGetProjectTool, + posthogGetPropertyDefinitionTool, + posthogGetSessionRecordingTool, + posthogGetSurveyTool, + posthogListActionsTool, + posthogListAnnotationsTool, + posthogListCohortsTool, + posthogListDashboardsTool, + posthogListEventDefinitionsTool, + posthogListExperimentsTool, + posthogListFeatureFlagsTool, + posthogListInsightsTool, + posthogListOrganizationsTool, + posthogListPersonsTool, + posthogListProjectsTool, + posthogListPropertyDefinitionsTool, + posthogListRecordingPlaylistsTool, + posthogListSessionRecordingsTool, + posthogListSurveysTool, + posthogQueryTool, + posthogUpdateEventDefinitionTool, + posthogUpdateFeatureFlagTool, + posthogUpdatePropertyDefinitionTool, + posthogUpdateSurveyTool, +} from '@/tools/posthog' +import { pulseParserTool, pulseParserV2Tool } from '@/tools/pulse' +import { qdrantFetchTool, qdrantSearchTool, qdrantUpsertTool } from '@/tools/qdrant' +import { + rdsDeleteTool, + rdsExecuteTool, + rdsInsertTool, + rdsIntrospectTool, + rdsQueryTool, + rdsUpdateTool, +} from '@/tools/rds' +import { + redditDeleteTool, + redditEditTool, + redditGetCommentsTool, + redditGetControversialTool, + redditGetPostsTool, + redditHotPostsTool, + redditReplyTool, + redditSaveTool, + redditSearchTool, + redditSubmitPostTool, + redditSubscribeTool, + redditUnsaveTool, + redditVoteTool, +} from '@/tools/reddit' +import { reductoParserTool, reductoParserV2Tool } from '@/tools/reducto' +import { mailSendTool } from '@/tools/resend' +import { + s3CopyObjectTool, + s3DeleteObjectTool, + s3GetObjectTool, + s3ListObjectsTool, + s3PutObjectTool, +} from '@/tools/s3' +import { + salesforceCreateAccountTool, + salesforceCreateCaseTool, + salesforceCreateContactTool, + salesforceCreateLeadTool, + salesforceCreateOpportunityTool, + salesforceCreateTaskTool, + salesforceDeleteAccountTool, + salesforceDeleteCaseTool, + salesforceDeleteContactTool, + salesforceDeleteLeadTool, + salesforceDeleteOpportunityTool, + salesforceDeleteTaskTool, + salesforceDescribeObjectTool, + salesforceGetAccountsTool, + salesforceGetCasesTool, + salesforceGetContactsTool, + salesforceGetDashboardTool, + salesforceGetLeadsTool, + salesforceGetOpportunitiesTool, + salesforceGetReportTool, + salesforceGetTasksTool, + salesforceListDashboardsTool, + salesforceListObjectsTool, + salesforceListReportsTool, + salesforceListReportTypesTool, + salesforceQueryMoreTool, + salesforceQueryTool, + salesforceRefreshDashboardTool, + salesforceRunReportTool, + salesforceUpdateAccountTool, + salesforceUpdateCaseTool, + 
salesforceUpdateContactTool, + salesforceUpdateLeadTool, + salesforceUpdateOpportunityTool, + salesforceUpdateTaskTool, +} from '@/tools/salesforce' +import { searchTool } from '@/tools/search' +import { + sendGridAddContactsToListTool, + sendGridAddContactTool, + sendGridCreateListTool, + sendGridCreateTemplateTool, + sendGridCreateTemplateVersionTool, + sendGridDeleteContactsTool, + sendGridDeleteListTool, + sendGridDeleteTemplateTool, + sendGridGetContactTool, + sendGridGetListTool, + sendGridGetTemplateTool, + sendGridListAllListsTool, + sendGridListTemplatesTool, + sendGridRemoveContactsFromListTool, + sendGridSearchContactsTool, + sendGridSendMailTool, +} from '@/tools/sendgrid' +import { + createDeployTool, + createProjectTool, + createReleaseTool, + getEventTool, + getIssueTool, + getProjectTool, + listEventsTool, + listIssuesTool, + listProjectsTool, + listReleasesTool, + updateIssueTool, + updateProjectTool, +} from '@/tools/sentry' +import { serperSearchTool } from '@/tools/serper' +import { + servicenowCreateRecordTool, + servicenowDeleteRecordTool, + servicenowReadRecordTool, + servicenowUpdateRecordTool, +} from '@/tools/servicenow' +import { + sftpDeleteTool, + sftpDownloadTool, + sftpListTool, + sftpMkdirTool, + sftpUploadTool, +} from '@/tools/sftp' +import { + sharepointAddListItemTool, + sharepointCreateListTool, + sharepointCreatePageTool, + sharepointGetListTool, + sharepointListSitesTool, + sharepointReadPageTool, + sharepointUpdateListItemTool, + sharepointUploadFileTool, +} from '@/tools/sharepoint' +import { + shopifyAdjustInventoryTool, + shopifyCancelOrderTool, + shopifyCreateCustomerTool, + shopifyCreateFulfillmentTool, + shopifyCreateProductTool, + shopifyDeleteCustomerTool, + shopifyDeleteProductTool, + shopifyGetCollectionTool, + shopifyGetCustomerTool, + shopifyGetInventoryLevelTool, + shopifyGetOrderTool, + shopifyGetProductTool, + shopifyListCollectionsTool, + shopifyListCustomersTool, + shopifyListInventoryItemsTool, + shopifyListLocationsTool, + shopifyListOrdersTool, + shopifyListProductsTool, + shopifyUpdateCustomerTool, + shopifyUpdateOrderTool, + shopifyUpdateProductTool, +} from '@/tools/shopify' +import { + similarwebBounceRateTool, + similarwebPagesPerVisitTool, + similarwebTrafficVisitsTool, + similarwebVisitDurationTool, + similarwebWebsiteOverviewTool, +} from '@/tools/similarweb' +import { + slackAddReactionTool, + slackCanvasTool, + slackDeleteMessageTool, + slackDownloadTool, + slackGetMessageTool, + slackGetThreadTool, + slackGetUserTool, + slackListChannelsTool, + slackListMembersTool, + slackListUsersTool, + slackMessageReaderTool, + slackMessageTool, + slackUpdateMessageTool, +} from '@/tools/slack' +import { smsSendTool } from '@/tools/sms' +import { smtpSendMailTool } from '@/tools/smtp' +import { + spotifyAddPlaylistCoverTool, + spotifyAddToQueueTool, + spotifyAddTracksToPlaylistTool, + spotifyCheckFollowingTool, + spotifyCheckPlaylistFollowersTool, + spotifyCheckSavedAlbumsTool, + spotifyCheckSavedAudiobooksTool, + spotifyCheckSavedEpisodesTool, + spotifyCheckSavedShowsTool, + spotifyCheckSavedTracksTool, + spotifyCreatePlaylistTool, + spotifyFollowArtistsTool, + spotifyFollowPlaylistTool, + spotifyGetAlbumsTool, + spotifyGetAlbumTool, + spotifyGetAlbumTracksTool, + spotifyGetArtistAlbumsTool, + spotifyGetArtistsTool, + spotifyGetArtistTool, + spotifyGetArtistTopTracksTool, + spotifyGetAudiobookChaptersTool, + spotifyGetAudiobooksTool, + spotifyGetAudiobookTool, + spotifyGetCategoriesTool, + spotifyGetCurrentlyPlayingTool, + 
spotifyGetCurrentUserTool, + spotifyGetDevicesTool, + spotifyGetEpisodesTool, + spotifyGetEpisodeTool, + spotifyGetFollowedArtistsTool, + spotifyGetMarketsTool, + spotifyGetNewReleasesTool, + spotifyGetPlaybackStateTool, + spotifyGetPlaylistCoverTool, + spotifyGetPlaylistTool, + spotifyGetPlaylistTracksTool, + spotifyGetQueueTool, + spotifyGetRecentlyPlayedTool, + spotifyGetSavedAlbumsTool, + spotifyGetSavedAudiobooksTool, + spotifyGetSavedEpisodesTool, + spotifyGetSavedShowsTool, + spotifyGetSavedTracksTool, + spotifyGetShowEpisodesTool, + spotifyGetShowsTool, + spotifyGetShowTool, + spotifyGetTopArtistsTool, + spotifyGetTopTracksTool, + spotifyGetTracksTool, + spotifyGetTrackTool, + spotifyGetUserPlaylistsTool, + spotifyGetUserProfileTool, + spotifyPauseTool, + spotifyPlayTool, + spotifyRemoveSavedAlbumsTool, + spotifyRemoveSavedAudiobooksTool, + spotifyRemoveSavedEpisodesTool, + spotifyRemoveSavedShowsTool, + spotifyRemoveSavedTracksTool, + spotifyRemoveTracksFromPlaylistTool, + spotifyReorderPlaylistItemsTool, + spotifyReplacePlaylistItemsTool, + spotifySaveAlbumsTool, + spotifySaveAudiobooksTool, + spotifySaveEpisodesTool, + spotifySaveShowsTool, + spotifySaveTracksTool, + spotifySearchTool, + spotifySeekTool, + spotifySetRepeatTool, + spotifySetShuffleTool, + spotifySetVolumeTool, + spotifySkipNextTool, + spotifySkipPreviousTool, + spotifyTransferPlaybackTool, + spotifyUnfollowArtistsTool, + spotifyUnfollowPlaylistTool, + spotifyUpdatePlaylistTool, +} from '@/tools/spotify' +import { + sshCheckCommandExistsTool, + sshCheckFileExistsTool, + sshCreateDirectoryTool, + sshDeleteFileTool, + sshDownloadFileTool, + sshExecuteCommandTool, + sshExecuteScriptTool, + sshGetSystemInfoTool, + sshListDirectoryTool, + sshMoveRenameTool, + sshReadFileContentTool, + sshUploadFileTool, + sshWriteFileContentTool, +} from '@/tools/ssh' +import { stagehandAgentTool, stagehandExtractTool } from '@/tools/stagehand' +import { + stripeCancelPaymentIntentTool, + stripeCancelSubscriptionTool, + stripeCaptureChargeTool, + stripeCapturePaymentIntentTool, + stripeConfirmPaymentIntentTool, + stripeCreateChargeTool, + stripeCreateCustomerTool, + stripeCreateInvoiceTool, + stripeCreatePaymentIntentTool, + stripeCreatePriceTool, + stripeCreateProductTool, + stripeCreateSubscriptionTool, + stripeDeleteCustomerTool, + stripeDeleteInvoiceTool, + stripeDeleteProductTool, + stripeFinalizeInvoiceTool, + stripeListChargesTool, + stripeListCustomersTool, + stripeListEventsTool, + stripeListInvoicesTool, + stripeListPaymentIntentsTool, + stripeListPricesTool, + stripeListProductsTool, + stripeListSubscriptionsTool, + stripePayInvoiceTool, + stripeResumeSubscriptionTool, + stripeRetrieveChargeTool, + stripeRetrieveCustomerTool, + stripeRetrieveEventTool, + stripeRetrieveInvoiceTool, + stripeRetrievePaymentIntentTool, + stripeRetrievePriceTool, + stripeRetrieveProductTool, + stripeRetrieveSubscriptionTool, + stripeSearchChargesTool, + stripeSearchCustomersTool, + stripeSearchInvoicesTool, + stripeSearchPaymentIntentsTool, + stripeSearchPricesTool, + stripeSearchProductsTool, + stripeSearchSubscriptionsTool, + stripeSendInvoiceTool, + stripeUpdateChargeTool, + stripeUpdateCustomerTool, + stripeUpdateInvoiceTool, + stripeUpdatePaymentIntentTool, + stripeUpdatePriceTool, + stripeUpdateProductTool, + stripeUpdateSubscriptionTool, + stripeVoidInvoiceTool, +} from '@/tools/stripe' +import { + assemblyaiSttTool, + assemblyaiSttV2Tool, + deepgramSttTool, + deepgramSttV2Tool, + elevenLabsSttTool, + elevenLabsSttV2Tool, + geminiSttTool, 
+ geminiSttV2Tool, + whisperSttTool, + whisperSttV2Tool, +} from '@/tools/stt' +import { + supabaseCountTool, + supabaseDeleteTool, + supabaseGetRowTool, + supabaseInsertTool, + supabaseIntrospectTool, + supabaseQueryTool, + supabaseRpcTool, + supabaseStorageCopyTool, + supabaseStorageCreateBucketTool, + supabaseStorageCreateSignedUrlTool, + supabaseStorageDeleteBucketTool, + supabaseStorageDeleteTool, + supabaseStorageDownloadTool, + supabaseStorageGetPublicUrlTool, + supabaseStorageListBucketsTool, + supabaseStorageListTool, + supabaseStorageMoveTool, + supabaseStorageUploadTool, + supabaseTextSearchTool, + supabaseUpdateTool, + supabaseUpsertTool, + supabaseVectorSearchTool, +} from '@/tools/supabase' +import { tavilyCrawlTool, tavilyExtractTool, tavilyMapTool, tavilySearchTool } from '@/tools/tavily' +import { + telegramDeleteMessageTool, + telegramMessageTool, + telegramSendAnimationTool, + telegramSendAudioTool, + telegramSendDocumentTool, + telegramSendPhotoTool, + telegramSendVideoTool, +} from '@/tools/telegram' +import { textractParserTool, textractParserV2Tool } from '@/tools/textract' +import { thinkingTool } from '@/tools/thinking' +import { tinybirdEventsTool, tinybirdQueryTool } from '@/tools/tinybird' +import { + trelloAddCommentTool, + trelloCreateCardTool, + trelloGetActionsTool, + trelloListCardsTool, + trelloListListsTool, + trelloUpdateCardTool, +} from '@/tools/trello' +import { + azureTtsTool, + cartesiaTtsTool, + deepgramTtsTool, + elevenLabsTtsUnifiedTool, + googleTtsTool, + openaiTtsTool, + playhtTtsTool, +} from '@/tools/tts' +import { sendSMSTool } from '@/tools/twilio' +import { getRecordingTool, listCallsTool, makeCallTool } from '@/tools/twilio_voice' +import { + typeformCreateFormTool, + typeformDeleteFormTool, + typeformFilesTool, + typeformGetFormTool, + typeformInsightsTool, + typeformListFormsTool, + typeformResponsesTool, + typeformUpdateFormTool, +} from '@/tools/typeform' +import type { ToolConfig } from '@/tools/types' +import { + falaiVideoTool, + lumaVideoTool, + minimaxVideoTool, + runwayVideoTool, + veoVideoTool, +} from '@/tools/video' +import { visionTool, visionToolV2 } from '@/tools/vision' +import { + wealthboxReadContactTool, + wealthboxReadNoteTool, + wealthboxReadTaskTool, + wealthboxWriteContactTool, + wealthboxWriteNoteTool, + wealthboxWriteTaskTool, +} from '@/tools/wealthbox' +import { + webflowCreateItemTool, + webflowDeleteItemTool, + webflowGetItemTool, + webflowListItemsTool, + webflowUpdateItemTool, +} from '@/tools/webflow' +import { whatsappSendMessageTool } from '@/tools/whatsapp' +import { + wikipediaPageContentTool, + wikipediaPageSummaryTool, + wikipediaRandomPageTool, + wikipediaSearchTool, +} from '@/tools/wikipedia' +import { + wordpressCreateCategoryTool, + wordpressCreateCommentTool, + wordpressCreatePageTool, + wordpressCreatePostTool, + wordpressCreateTagTool, + wordpressDeleteCommentTool, + wordpressDeleteMediaTool, + wordpressDeletePageTool, + wordpressDeletePostTool, + wordpressGetCurrentUserTool, + wordpressGetMediaTool, + wordpressGetPageTool, + wordpressGetPostTool, + wordpressGetUserTool, + wordpressListCategoriesTool, + wordpressListCommentsTool, + wordpressListMediaTool, + wordpressListPagesTool, + wordpressListPostsTool, + wordpressListTagsTool, + wordpressListUsersTool, + wordpressSearchContentTool, + wordpressUpdateCommentTool, + wordpressUpdatePageTool, + wordpressUpdatePostTool, + wordpressUploadMediaTool, +} from '@/tools/wordpress' +import { workflowExecutorTool } from '@/tools/workflow' +import { 
xReadTool, xSearchTool, xUserTool, xWriteTool } from '@/tools/x' +import { + youtubeChannelInfoTool, + youtubeChannelPlaylistsTool, + youtubeChannelVideosTool, + youtubeCommentsTool, + youtubePlaylistItemsTool, + youtubeSearchTool, + youtubeTrendingTool, + youtubeVideoCategoriesTool, + youtubeVideoDetailsTool, +} from '@/tools/youtube' +import { + zendeskAutocompleteOrganizationsTool, + zendeskCreateOrganizationsBulkTool, + zendeskCreateOrganizationTool, + zendeskCreateTicketsBulkTool, + zendeskCreateTicketTool, + zendeskCreateUsersBulkTool, + zendeskCreateUserTool, + zendeskDeleteOrganizationTool, + zendeskDeleteTicketTool, + zendeskDeleteUserTool, + zendeskGetCurrentUserTool, + zendeskGetOrganizationsTool, + zendeskGetOrganizationTool, + zendeskGetTicketsTool, + zendeskGetTicketTool, + zendeskGetUsersTool, + zendeskGetUserTool, + zendeskMergeTicketsTool, + zendeskSearchCountTool, + zendeskSearchTool, + zendeskSearchUsersTool, + zendeskUpdateOrganizationTool, + zendeskUpdateTicketsBulkTool, + zendeskUpdateTicketTool, + zendeskUpdateUsersBulkTool, + zendeskUpdateUserTool, +} from '@/tools/zendesk' +import { + zepAddMessagesTool, + zepAddUserTool, + zepCreateThreadTool, + zepDeleteThreadTool, + zepGetContextTool, + zepGetMessagesTool, + zepGetThreadsTool, + zepGetUserThreadsTool, + zepGetUserTool, +} from '@/tools/zep' +import { + zoomCreateMeetingTool, + zoomDeleteMeetingTool, + zoomDeleteRecordingTool, + zoomGetMeetingInvitationTool, + zoomGetMeetingRecordingsTool, + zoomGetMeetingTool, + zoomListMeetingsTool, + zoomListPastParticipantsTool, + zoomListRecordingsTool, + zoomUpdateMeetingTool, +} from '@/tools/zoom' +import { sqsSendTool } from './sqs' + +// Registry of all available tools +export const tools: Record<string, ToolConfig> = { + a2a_cancel_task: a2aCancelTaskTool, + a2a_delete_push_notification: a2aDeletePushNotificationTool, + a2a_get_agent_card: a2aGetAgentCardTool, + a2a_get_push_notification: a2aGetPushNotificationTool, + a2a_get_task: a2aGetTaskTool, + a2a_resubscribe: a2aResubscribeTool, + a2a_send_message: a2aSendMessageTool, + a2a_set_push_notification: a2aSetPushNotificationTool, + airweave_search: airweaveSearchTool, + arxiv_search: arxivSearchTool, + arxiv_get_paper: arxivGetPaperTool, + arxiv_get_author_papers: arxivGetAuthorPapersTool, + asana_get_task: asanaGetTaskTool, + asana_create_task: asanaCreateTaskTool, + asana_update_task: asanaUpdateTaskTool, + asana_get_projects: asanaGetProjectsTool, + asana_search_tasks: asanaSearchTasksTool, + asana_add_comment: asanaAddCommentTool, + browser_use_run_task: browserUseRunTaskTool, + openai_embeddings: openAIEmbeddingsTool, + http_request: httpRequestTool, + webhook_request: webhookRequestTool, + huggingface_chat: huggingfaceChatTool, + llm_chat: llmChatTool, + function_execute: functionExecuteTool, + vision_tool: visionTool, + vision_tool_v2: visionToolV2, + file_parser: fileParseTool, + file_parser_v2: fileParserV2Tool, + file_parser_v3: fileParserV3Tool, + firecrawl_scrape: firecrawlScrapeTool, + firecrawl_search: firecrawlSearchTool, + firecrawl_crawl: firecrawlCrawlTool, + firecrawl_map: firecrawlMapTool, + firecrawl_extract: firecrawlExtractTool, + firecrawl_agent: firecrawlAgentTool, + fireflies_list_transcripts: firefliesListTranscriptsTool, + fireflies_get_transcript: firefliesGetTranscriptTool, + fireflies_get_user: firefliesGetUserTool, + fireflies_list_users: firefliesListUsersTool, + fireflies_upload_audio: firefliesUploadAudioTool, + fireflies_delete_transcript: firefliesDeleteTranscriptTool, + fireflies_add_to_live_meeting:
firefliesAddToLiveMeetingTool, + fireflies_create_bite: firefliesCreateBiteTool, + fireflies_list_bites: firefliesListBitesTool, + fireflies_list_contacts: firefliesListContactsTool, + grafana_get_dashboard: grafanaGetDashboardTool, + grafana_list_dashboards: grafanaListDashboardsTool, + grafana_create_dashboard: grafanaCreateDashboardTool, + grafana_update_dashboard: grafanaUpdateDashboardTool, + grafana_delete_dashboard: grafanaDeleteDashboardTool, + grafana_list_alert_rules: grafanaListAlertRulesTool, + grafana_get_alert_rule: grafanaGetAlertRuleTool, + grafana_create_alert_rule: grafanaCreateAlertRuleTool, + grafana_update_alert_rule: grafanaUpdateAlertRuleTool, + grafana_delete_alert_rule: grafanaDeleteAlertRuleTool, + grafana_list_contact_points: grafanaListContactPointsTool, + grafana_create_annotation: grafanaCreateAnnotationTool, + grafana_list_annotations: grafanaListAnnotationsTool, + grafana_update_annotation: grafanaUpdateAnnotationTool, + grafana_delete_annotation: grafanaDeleteAnnotationTool, + grafana_list_data_sources: grafanaListDataSourcesTool, + grafana_get_data_source: grafanaGetDataSourceTool, + grafana_list_folders: grafanaListFoldersTool, + grafana_create_folder: grafanaCreateFolderTool, + google_search: googleSearchTool, + guardrails_validate: guardrailsValidateTool, + jina_read_url: jinaReadUrlTool, + jina_search: jinaSearchTool, + linkup_search: linkupSearchTool, + linkedin_share_post: linkedInSharePostTool, + linkedin_get_profile: linkedInGetProfileTool, + resend_send: mailSendTool, + sendgrid_send_mail: sendGridSendMailTool, + sendgrid_add_contact: sendGridAddContactTool, + sendgrid_get_contact: sendGridGetContactTool, + sendgrid_search_contacts: sendGridSearchContactsTool, + sendgrid_delete_contacts: sendGridDeleteContactsTool, + sendgrid_create_list: sendGridCreateListTool, + sendgrid_get_list: sendGridGetListTool, + sendgrid_list_all_lists: sendGridListAllListsTool, + sendgrid_delete_list: sendGridDeleteListTool, + sendgrid_add_contacts_to_list: sendGridAddContactsToListTool, + sendgrid_remove_contacts_from_list: sendGridRemoveContactsFromListTool, + sendgrid_create_template: sendGridCreateTemplateTool, + sendgrid_get_template: sendGridGetTemplateTool, + sendgrid_list_templates: sendGridListTemplatesTool, + sendgrid_delete_template: sendGridDeleteTemplateTool, + sendgrid_create_template_version: sendGridCreateTemplateVersionTool, + smtp_send_mail: smtpSendMailTool, + sftp_upload: sftpUploadTool, + sftp_download: sftpDownloadTool, + sftp_list: sftpListTool, + sftp_delete: sftpDeleteTool, + sftp_mkdir: sftpMkdirTool, + ssh_execute_command: sshExecuteCommandTool, + ssh_execute_script: sshExecuteScriptTool, + ssh_check_command_exists: sshCheckCommandExistsTool, + ssh_upload_file: sshUploadFileTool, + ssh_download_file: sshDownloadFileTool, + ssh_list_directory: sshListDirectoryTool, + ssh_check_file_exists: sshCheckFileExistsTool, + ssh_create_directory: sshCreateDirectoryTool, + ssh_delete_file: sshDeleteFileTool, + ssh_move_rename: sshMoveRenameTool, + ssh_get_system_info: sshGetSystemInfoTool, + ssh_read_file_content: sshReadFileContentTool, + ssh_write_file_content: sshWriteFileContentTool, + mailgun_send_message: mailgunSendMessageTool, + mailgun_get_message: mailgunGetMessageTool, + mailgun_list_messages: mailgunListMessagesTool, + mailgun_create_mailing_list: mailgunCreateMailingListTool, + mailgun_get_mailing_list: mailgunGetMailingListTool, + mailgun_add_list_member: mailgunAddListMemberTool, + mailgun_list_domains: mailgunListDomainsTool, + 
mailgun_get_domain: mailgunGetDomainTool, + sms_send: smsSendTool, + jira_retrieve: jiraRetrieveTool, + jira_update: jiraUpdateTool, + jira_write: jiraWriteTool, + jira_bulk_read: jiraBulkRetrieveTool, + jira_delete_issue: jiraDeleteIssueTool, + jira_assign_issue: jiraAssignIssueTool, + jira_transition_issue: jiraTransitionIssueTool, + jira_search_issues: jiraSearchIssuesTool, + jira_add_comment: jiraAddCommentTool, + jira_get_comments: jiraGetCommentsTool, + jira_update_comment: jiraUpdateCommentTool, + jira_delete_comment: jiraDeleteCommentTool, + jira_get_attachments: jiraGetAttachmentsTool, + jira_add_attachment: jiraAddAttachmentTool, + jira_delete_attachment: jiraDeleteAttachmentTool, + jira_add_worklog: jiraAddWorklogTool, + jira_get_worklogs: jiraGetWorklogsTool, + jira_update_worklog: jiraUpdateWorklogTool, + jira_delete_worklog: jiraDeleteWorklogTool, + jira_create_issue_link: jiraCreateIssueLinkTool, + jira_delete_issue_link: jiraDeleteIssueLinkTool, + jira_add_watcher: jiraAddWatcherTool, + jira_remove_watcher: jiraRemoveWatcherTool, + jira_get_users: jiraGetUsersTool, + jsm_get_service_desks: jsmGetServiceDesksTool, + jsm_get_request_types: jsmGetRequestTypesTool, + jsm_create_request: jsmCreateRequestTool, + jsm_get_request: jsmGetRequestTool, + jsm_get_requests: jsmGetRequestsTool, + jsm_add_comment: jsmAddCommentTool, + jsm_get_comments: jsmGetCommentsTool, + jsm_get_customers: jsmGetCustomersTool, + jsm_add_customer: jsmAddCustomerTool, + jsm_get_organizations: jsmGetOrganizationsTool, + jsm_create_organization: jsmCreateOrganizationTool, + jsm_add_organization: jsmAddOrganizationTool, + jsm_get_queues: jsmGetQueuesTool, + jsm_get_sla: jsmGetSlaTool, + jsm_get_transitions: jsmGetTransitionsTool, + jsm_transition_request: jsmTransitionRequestTool, + jsm_get_participants: jsmGetParticipantsTool, + jsm_add_participants: jsmAddParticipantsTool, + jsm_get_approvals: jsmGetApprovalsTool, + jsm_answer_approval: jsmAnswerApprovalTool, + kalshi_get_markets: kalshiGetMarketsTool, + kalshi_get_markets_v2: kalshiGetMarketsV2Tool, + kalshi_get_market: kalshiGetMarketTool, + kalshi_get_market_v2: kalshiGetMarketV2Tool, + kalshi_get_events: kalshiGetEventsTool, + kalshi_get_events_v2: kalshiGetEventsV2Tool, + kalshi_get_event: kalshiGetEventTool, + kalshi_get_event_v2: kalshiGetEventV2Tool, + kalshi_get_balance: kalshiGetBalanceTool, + kalshi_get_balance_v2: kalshiGetBalanceV2Tool, + kalshi_get_positions: kalshiGetPositionsTool, + kalshi_get_positions_v2: kalshiGetPositionsV2Tool, + kalshi_get_orders: kalshiGetOrdersTool, + kalshi_get_orders_v2: kalshiGetOrdersV2Tool, + kalshi_get_order: kalshiGetOrderTool, + kalshi_get_order_v2: kalshiGetOrderV2Tool, + kalshi_get_orderbook: kalshiGetOrderbookTool, + kalshi_get_orderbook_v2: kalshiGetOrderbookV2Tool, + kalshi_get_trades: kalshiGetTradesTool, + kalshi_get_trades_v2: kalshiGetTradesV2Tool, + kalshi_get_candlesticks: kalshiGetCandlesticksTool, + kalshi_get_candlesticks_v2: kalshiGetCandlesticksV2Tool, + kalshi_get_fills: kalshiGetFillsTool, + kalshi_get_fills_v2: kalshiGetFillsV2Tool, + kalshi_get_series_by_ticker: kalshiGetSeriesByTickerTool, + kalshi_get_series_by_ticker_v2: kalshiGetSeriesByTickerV2Tool, + kalshi_get_exchange_status: kalshiGetExchangeStatusTool, + kalshi_get_exchange_status_v2: kalshiGetExchangeStatusV2Tool, + kalshi_create_order: kalshiCreateOrderTool, + kalshi_create_order_v2: kalshiCreateOrderV2Tool, + kalshi_cancel_order: kalshiCancelOrderTool, + kalshi_cancel_order_v2: kalshiCancelOrderV2Tool, + kalshi_amend_order: 
kalshiAmendOrderTool, + kalshi_amend_order_v2: kalshiAmendOrderV2Tool, + polymarket_get_markets: polymarketGetMarketsTool, + polymarket_get_market: polymarketGetMarketTool, + polymarket_get_events: polymarketGetEventsTool, + polymarket_get_event: polymarketGetEventTool, + polymarket_get_orderbook: polymarketGetOrderbookTool, + polymarket_get_price: polymarketGetPriceTool, + polymarket_get_midpoint: polymarketGetMidpointTool, + polymarket_get_price_history: polymarketGetPriceHistoryTool, + polymarket_get_tags: polymarketGetTagsTool, + polymarket_search: polymarketSearchTool, + polymarket_get_series: polymarketGetSeriesTool, + polymarket_get_series_by_id: polymarketGetSeriesByIdTool, + polymarket_get_last_trade_price: polymarketGetLastTradePriceTool, + polymarket_get_spread: polymarketGetSpreadTool, + polymarket_get_tick_size: polymarketGetTickSizeTool, + polymarket_get_positions: polymarketGetPositionsTool, + polymarket_get_trades: polymarketGetTradesTool, + polymarket_get_activity: polymarketGetActivityTool, + polymarket_get_leaderboard: polymarketGetLeaderboardTool, + polymarket_get_holders: polymarketGetHoldersTool, + slack_message: slackMessageTool, + slack_message_reader: slackMessageReaderTool, + slack_list_channels: slackListChannelsTool, + slack_list_members: slackListMembersTool, + slack_list_users: slackListUsersTool, + slack_get_user: slackGetUserTool, + slack_get_message: slackGetMessageTool, + slack_get_thread: slackGetThreadTool, + slack_canvas: slackCanvasTool, + slack_download: slackDownloadTool, + slack_update_message: slackUpdateMessageTool, + slack_delete_message: slackDeleteMessageTool, + slack_add_reaction: slackAddReactionTool, + github_repo_info: githubRepoInfoTool, + github_repo_info_v2: githubRepoInfoV2Tool, + github_latest_commit: githubLatestCommitTool, + github_latest_commit_v2: githubLatestCommitV2Tool, + serper_search: serperSearchTool, + similarweb_website_overview: similarwebWebsiteOverviewTool, + similarweb_traffic_visits: similarwebTrafficVisitsTool, + similarweb_bounce_rate: similarwebBounceRateTool, + similarweb_pages_per_visit: similarwebPagesPerVisitTool, + similarweb_visit_duration: similarwebVisitDurationTool, + servicenow_create_record: servicenowCreateRecordTool, + servicenow_read_record: servicenowReadRecordTool, + servicenow_update_record: servicenowUpdateRecordTool, + servicenow_delete_record: servicenowDeleteRecordTool, + tavily_search: tavilySearchTool, + tavily_extract: tavilyExtractTool, + tavily_crawl: tavilyCrawlTool, + tavily_map: tavilyMapTool, + supabase_query: supabaseQueryTool, + supabase_insert: supabaseInsertTool, + supabase_get_row: supabaseGetRowTool, + supabase_update: supabaseUpdateTool, + supabase_delete: supabaseDeleteTool, + supabase_upsert: supabaseUpsertTool, + supabase_count: supabaseCountTool, + supabase_text_search: supabaseTextSearchTool, + supabase_vector_search: supabaseVectorSearchTool, + supabase_rpc: supabaseRpcTool, + supabase_introspect: supabaseIntrospectTool, + supabase_storage_upload: supabaseStorageUploadTool, + supabase_storage_download: supabaseStorageDownloadTool, + supabase_storage_list: supabaseStorageListTool, + supabase_storage_delete: supabaseStorageDeleteTool, + supabase_storage_move: supabaseStorageMoveTool, + supabase_storage_copy: supabaseStorageCopyTool, + supabase_storage_create_bucket: supabaseStorageCreateBucketTool, + supabase_storage_list_buckets: supabaseStorageListBucketsTool, + supabase_storage_delete_bucket: supabaseStorageDeleteBucketTool, + supabase_storage_get_public_url: 
supabaseStorageGetPublicUrlTool, + supabase_storage_create_signed_url: supabaseStorageCreateSignedUrlTool, + calendly_get_current_user: calendlyGetCurrentUserTool, + calendly_list_event_types: calendlyListEventTypesTool, + calendly_get_event_type: calendlyGetEventTypeTool, + calendly_list_scheduled_events: calendlyListScheduledEventsTool, + calendly_get_scheduled_event: calendlyGetScheduledEventTool, + calendly_list_event_invitees: calendlyListEventInviteesTool, + calendly_cancel_event: calendlyCancelEventTool, + calendly_list_webhooks: calendlyListWebhooksTool, + calendly_create_webhook: calendlyCreateWebhookTool, + calendly_delete_webhook: calendlyDeleteWebhookTool, + calcom_create_booking: calcomCreateBookingTool, + calcom_get_booking: calcomGetBookingTool, + calcom_list_bookings: calcomListBookingsTool, + calcom_cancel_booking: calcomCancelBookingTool, + calcom_reschedule_booking: calcomRescheduleBookingTool, + calcom_confirm_booking: calcomConfirmBookingTool, + calcom_decline_booking: calcomDeclineBookingTool, + calcom_create_event_type: calcomCreateEventTypeTool, + calcom_get_event_type: calcomGetEventTypeTool, + calcom_list_event_types: calcomListEventTypesTool, + calcom_update_event_type: calcomUpdateEventTypeTool, + calcom_delete_event_type: calcomDeleteEventTypeTool, + calcom_create_schedule: calcomCreateScheduleTool, + calcom_get_schedule: calcomGetScheduleTool, + calcom_list_schedules: calcomListSchedulesTool, + calcom_update_schedule: calcomUpdateScheduleTool, + calcom_delete_schedule: calcomDeleteScheduleTool, + calcom_get_default_schedule: calcomGetDefaultScheduleTool, + calcom_get_slots: calcomGetSlotsTool, + typeform_responses: typeformResponsesTool, + typeform_files: typeformFilesTool, + typeform_insights: typeformInsightsTool, + typeform_list_forms: typeformListFormsTool, + typeform_get_form: typeformGetFormTool, + typeform_create_form: typeformCreateFormTool, + typeform_update_form: typeformUpdateFormTool, + typeform_delete_form: typeformDeleteFormTool, + youtube_channel_info: youtubeChannelInfoTool, + youtube_channel_playlists: youtubeChannelPlaylistsTool, + youtube_channel_videos: youtubeChannelVideosTool, + youtube_comments: youtubeCommentsTool, + youtube_playlist_items: youtubePlaylistItemsTool, + youtube_search: youtubeSearchTool, + youtube_trending: youtubeTrendingTool, + youtube_video_categories: youtubeVideoCategoriesTool, + youtube_video_details: youtubeVideoDetailsTool, + notion_read: notionReadTool, + notion_read_database: notionReadDatabaseTool, + notion_write: notionWriteTool, + notion_create_page: notionCreatePageTool, + notion_query_database: notionQueryDatabaseTool, + notion_search: notionSearchTool, + notion_create_database: notionCreateDatabaseTool, + // Notion V2 tools + notion_read_v2: notionReadV2Tool, + notion_read_database_v2: notionReadDatabaseV2Tool, + notion_write_v2: notionWriteV2Tool, + notion_create_page_v2: notionCreatePageV2Tool, + notion_query_database_v2: notionQueryDatabaseV2Tool, + notion_search_v2: notionSearchV2Tool, + notion_create_database_v2: notionCreateDatabaseV2Tool, + notion_update_page_v2: notionUpdatePageV2Tool, + notion_add_database_row_v2: notionAddDatabaseRowTool, + gmail_send: gmailSendTool, + gmail_send_v2: gmailSendV2Tool, + gmail_read: gmailReadTool, + gmail_read_v2: gmailReadV2Tool, + gmail_search: gmailSearchTool, + gmail_search_v2: gmailSearchV2Tool, + gmail_draft: gmailDraftTool, + gmail_draft_v2: gmailDraftV2Tool, + gmail_move: gmailMoveTool, + gmail_move_v2: gmailMoveV2Tool, + gmail_mark_read: gmailMarkReadTool, 
+ gmail_mark_read_v2: gmailMarkReadV2Tool, + gmail_mark_unread: gmailMarkUnreadTool, + gmail_mark_unread_v2: gmailMarkUnreadV2Tool, + gmail_archive: gmailArchiveTool, + gmail_archive_v2: gmailArchiveV2Tool, + gmail_unarchive: gmailUnarchiveTool, + gmail_unarchive_v2: gmailUnarchiveV2Tool, + gmail_delete: gmailDeleteTool, + gmail_delete_v2: gmailDeleteV2Tool, + gmail_add_label: gmailAddLabelTool, + gmail_add_label_v2: gmailAddLabelV2Tool, + gmail_remove_label: gmailRemoveLabelTool, + gmail_remove_label_v2: gmailRemoveLabelV2Tool, + whatsapp_send_message: whatsappSendMessageTool, + x_write: xWriteTool, + x_read: xReadTool, + x_search: xSearchTool, + x_user: xUserTool, + pinecone_fetch: pineconeFetchTool, + pinecone_generate_embeddings: pineconeGenerateEmbeddingsTool, + pinecone_search_text: pineconeSearchTextTool, + pinecone_search_vector: pineconeSearchVectorTool, + pinecone_upsert_text: pineconeUpsertTextTool, + pipedrive_create_activity: pipedriveCreateActivityTool, + pipedrive_create_deal: pipedriveCreateDealTool, + pipedrive_create_lead: pipedriveCreateLeadTool, + pipedrive_create_project: pipedriveCreateProjectTool, + pipedrive_delete_lead: pipedriveDeleteLeadTool, + pipedrive_get_activities: pipedriveGetActivitiesTool, + pipedrive_get_all_deals: pipedriveGetAllDealsTool, + pipedrive_get_deal: pipedriveGetDealTool, + pipedrive_get_files: pipedriveGetFilesTool, + pipedrive_get_leads: pipedriveGetLeadsTool, + pipedrive_get_mail_messages: pipedriveGetMailMessagesTool, + pipedrive_get_mail_thread: pipedriveGetMailThreadTool, + pipedrive_get_pipeline_deals: pipedriveGetPipelineDealsTool, + pipedrive_get_pipelines: pipedriveGetPipelinesTool, + pipedrive_get_projects: pipedriveGetProjectsTool, + pipedrive_update_activity: pipedriveUpdateActivityTool, + pipedrive_update_deal: pipedriveUpdateDealTool, + pipedrive_update_lead: pipedriveUpdateLeadTool, + postgresql_query: postgresQueryTool, + postgresql_insert: postgresInsertTool, + postgresql_update: postgresUpdateTool, + postgresql_delete: postgresDeleteTool, + postgresql_execute: postgresExecuteTool, + postgresql_introspect: postgresIntrospectTool, + rds_query: rdsQueryTool, + rds_insert: rdsInsertTool, + rds_update: rdsUpdateTool, + rds_delete: rdsDeleteTool, + rds_execute: rdsExecuteTool, + rds_introspect: rdsIntrospectTool, + dynamodb_get: dynamodbGetTool, + dynamodb_put: dynamodbPutTool, + dynamodb_query: dynamodbQueryTool, + dynamodb_scan: dynamodbScanTool, + dynamodb_update: dynamodbUpdateTool, + dynamodb_delete: dynamodbDeleteTool, + dynamodb_introspect: dynamodbIntrospectTool, + dropbox_upload: dropboxUploadTool, + dropbox_download: dropboxDownloadTool, + dropbox_list_folder: dropboxListFolderTool, + dropbox_create_folder: dropboxCreateFolderTool, + dropbox_delete: dropboxDeleteTool, + dropbox_copy: dropboxCopyTool, + dropbox_move: dropboxMoveTool, + dropbox_get_metadata: dropboxGetMetadataTool, + dropbox_create_shared_link: dropboxCreateSharedLinkTool, + dropbox_search: dropboxSearchTool, + duckduckgo_search: duckduckgoSearchTool, + dspy_predict: predictTool, + dspy_chain_of_thought: chainOfThoughtTool, + dspy_react: reactTool, + mongodb_query: mongodbQueryTool, + mongodb_insert: mongodbInsertTool, + mongodb_update: mongodbUpdateTool, + mongodb_delete: mongodbDeleteTool, + mongodb_execute: mongodbExecuteTool, + mongodb_introspect: mongodbIntrospectTool, + mysql_query: mysqlQueryTool, + mysql_insert: mysqlInsertTool, + mysql_update: mysqlUpdateTool, + mysql_delete: mysqlDeleteTool, + mysql_execute: mysqlExecuteTool, + mysql_introspect: 
mysqlIntrospectTool, + neo4j_query: neo4jQueryTool, + neo4j_create: neo4jCreateTool, + neo4j_merge: neo4jMergeTool, + neo4j_update: neo4jUpdateTool, + neo4j_delete: neo4jDeleteTool, + neo4j_execute: neo4jExecuteTool, + neo4j_introspect: neo4jIntrospectTool, + github_pr: githubPrTool, + github_pr_v2: githubPrV2Tool, + github_comment: githubCommentTool, + github_comment_v2: githubCommentV2Tool, + github_issue_comment: githubIssueCommentTool, + github_issue_comment_v2: githubIssueCommentV2Tool, + github_list_issue_comments: githubListIssueCommentsTool, + github_list_issue_comments_v2: githubListIssueCommentsV2Tool, + github_update_comment: githubUpdateCommentTool, + github_update_comment_v2: githubUpdateCommentV2Tool, + github_delete_comment: githubDeleteCommentTool, + github_delete_comment_v2: githubDeleteCommentV2Tool, + github_list_pr_comments: githubListPRCommentsTool, + github_list_pr_comments_v2: githubListPRCommentsV2Tool, + github_create_pr: githubCreatePRTool, + github_create_pr_v2: githubCreatePRV2Tool, + github_update_pr: githubUpdatePRTool, + github_update_pr_v2: githubUpdatePRV2Tool, + github_merge_pr: githubMergePRTool, + github_merge_pr_v2: githubMergePRV2Tool, + github_list_prs: githubListPRsTool, + github_list_prs_v2: githubListPRsV2Tool, + github_get_pr_files: githubGetPRFilesTool, + github_get_pr_files_v2: githubGetPRFilesV2Tool, + github_close_pr: githubClosePRTool, + github_close_pr_v2: githubClosePRV2Tool, + github_request_reviewers: githubRequestReviewersTool, + github_request_reviewers_v2: githubRequestReviewersV2Tool, + github_get_file_content: githubGetFileContentTool, + github_get_file_content_v2: githubGetFileContentV2Tool, + github_create_file: githubCreateFileTool, + github_create_file_v2: githubCreateFileV2Tool, + github_update_file: githubUpdateFileTool, + github_update_file_v2: githubUpdateFileV2Tool, + github_delete_file: githubDeleteFileTool, + github_delete_file_v2: githubDeleteFileV2Tool, + github_get_tree: githubGetTreeTool, + github_get_tree_v2: githubGetTreeV2Tool, + github_list_branches: githubListBranchesTool, + github_list_branches_v2: githubListBranchesV2Tool, + github_get_branch: githubGetBranchTool, + github_get_branch_v2: githubGetBranchV2Tool, + github_create_branch: githubCreateBranchTool, + github_create_branch_v2: githubCreateBranchV2Tool, + github_delete_branch: githubDeleteBranchTool, + github_delete_branch_v2: githubDeleteBranchV2Tool, + github_get_branch_protection: githubGetBranchProtectionTool, + github_get_branch_protection_v2: githubGetBranchProtectionV2Tool, + github_update_branch_protection: githubUpdateBranchProtectionTool, + github_update_branch_protection_v2: githubUpdateBranchProtectionV2Tool, + github_create_issue: githubCreateIssueTool, + github_create_issue_v2: githubCreateIssueV2Tool, + github_update_issue: githubUpdateIssueTool, + github_update_issue_v2: githubUpdateIssueV2Tool, + github_list_issues: githubListIssuesTool, + github_list_issues_v2: githubListIssuesV2Tool, + github_get_issue: githubGetIssueTool, + github_get_issue_v2: githubGetIssueV2Tool, + github_close_issue: githubCloseIssueTool, + github_close_issue_v2: githubCloseIssueV2Tool, + github_add_labels: githubAddLabelsTool, + github_add_labels_v2: githubAddLabelsV2Tool, + github_remove_label: githubRemoveLabelTool, + github_remove_label_v2: githubRemoveLabelV2Tool, + github_add_assignees: githubAddAssigneesTool, + github_add_assignees_v2: githubAddAssigneesV2Tool, + github_create_release: githubCreateReleaseTool, + github_create_release_v2: 
githubCreateReleaseV2Tool, + github_update_release: githubUpdateReleaseTool, + github_update_release_v2: githubUpdateReleaseV2Tool, + github_list_releases: githubListReleasesTool, + github_list_releases_v2: githubListReleasesV2Tool, + github_get_release: githubGetReleaseTool, + github_get_release_v2: githubGetReleaseV2Tool, + github_delete_release: githubDeleteReleaseTool, + github_delete_release_v2: githubDeleteReleaseV2Tool, + github_list_workflows: githubListWorkflowsTool, + github_list_workflows_v2: githubListWorkflowsV2Tool, + github_get_workflow: githubGetWorkflowTool, + github_get_workflow_v2: githubGetWorkflowV2Tool, + github_trigger_workflow: githubTriggerWorkflowTool, + github_trigger_workflow_v2: githubTriggerWorkflowV2Tool, + github_list_workflow_runs: githubListWorkflowRunsTool, + github_list_workflow_runs_v2: githubListWorkflowRunsV2Tool, + github_get_workflow_run: githubGetWorkflowRunTool, + github_get_workflow_run_v2: githubGetWorkflowRunV2Tool, + github_cancel_workflow_run: githubCancelWorkflowRunTool, + github_cancel_workflow_run_v2: githubCancelWorkflowRunV2Tool, + github_rerun_workflow: githubRerunWorkflowTool, + github_rerun_workflow_v2: githubRerunWorkflowV2Tool, + github_list_projects: githubListProjectsTool, + github_list_projects_v2: githubListProjectsV2Tool, + github_get_project: githubGetProjectTool, + github_get_project_v2: githubGetProjectV2Tool, + github_create_project: githubCreateProjectTool, + github_create_project_v2: githubCreateProjectV2Tool, + github_update_project: githubUpdateProjectTool, + github_update_project_v2: githubUpdateProjectV2Tool, + github_delete_project: githubDeleteProjectTool, + github_delete_project_v2: githubDeleteProjectV2Tool, + // New GitHub tools - Search + github_search_code: githubSearchCodeTool, + github_search_code_v2: githubSearchCodeV2Tool, + github_search_commits: githubSearchCommitsTool, + github_search_commits_v2: githubSearchCommitsV2Tool, + github_search_issues: githubSearchIssuesTool, + github_search_issues_v2: githubSearchIssuesV2Tool, + github_search_repos: githubSearchReposTool, + github_search_repos_v2: githubSearchReposV2Tool, + github_search_users: githubSearchUsersTool, + github_search_users_v2: githubSearchUsersV2Tool, + // New GitHub tools - Commits + github_list_commits: githubListCommitsTool, + github_list_commits_v2: githubListCommitsV2Tool, + github_get_commit: githubGetCommitTool, + github_get_commit_v2: githubGetCommitV2Tool, + github_compare_commits: githubCompareCommitsTool, + github_compare_commits_v2: githubCompareCommitsV2Tool, + // New GitHub tools - Gists + github_create_gist: githubCreateGistTool, + github_create_gist_v2: githubCreateGistV2Tool, + github_get_gist: githubGetGistTool, + github_get_gist_v2: githubGetGistV2Tool, + github_list_gists: githubListGistsTool, + github_list_gists_v2: githubListGistsV2Tool, + github_update_gist: githubUpdateGistTool, + github_update_gist_v2: githubUpdateGistV2Tool, + github_delete_gist: githubDeleteGistTool, + github_delete_gist_v2: githubDeleteGistV2Tool, + github_fork_gist: githubForkGistTool, + github_fork_gist_v2: githubForkGistV2Tool, + github_star_gist: githubStarGistTool, + github_star_gist_v2: githubStarGistV2Tool, + github_unstar_gist: githubUnstarGistTool, + github_unstar_gist_v2: githubUnstarGistV2Tool, + // New GitHub tools - Forks + github_fork_repo: githubForkRepoTool, + github_fork_repo_v2: githubForkRepoV2Tool, + github_list_forks: githubListForksTool, + github_list_forks_v2: githubListForksV2Tool, + // New GitHub tools - Milestones + 
github_create_milestone: githubCreateMilestoneTool, + github_create_milestone_v2: githubCreateMilestoneV2Tool, + github_get_milestone: githubGetMilestoneTool, + github_get_milestone_v2: githubGetMilestoneV2Tool, + github_list_milestones: githubListMilestonesTool, + github_list_milestones_v2: githubListMilestonesV2Tool, + github_update_milestone: githubUpdateMilestoneTool, + github_update_milestone_v2: githubUpdateMilestoneV2Tool, + github_delete_milestone: githubDeleteMilestoneTool, + github_delete_milestone_v2: githubDeleteMilestoneV2Tool, + // New GitHub tools - Reactions + github_create_issue_reaction: githubCreateIssueReactionTool, + github_create_issue_reaction_v2: githubCreateIssueReactionV2Tool, + github_delete_issue_reaction: githubDeleteIssueReactionTool, + github_delete_issue_reaction_v2: githubDeleteIssueReactionV2Tool, + github_create_comment_reaction: githubCreateCommentReactionTool, + github_create_comment_reaction_v2: githubCreateCommentReactionV2Tool, + github_delete_comment_reaction: githubDeleteCommentReactionTool, + github_delete_comment_reaction_v2: githubDeleteCommentReactionV2Tool, + // New GitHub tools - Stars + github_star_repo: githubStarRepoTool, + github_star_repo_v2: githubStarRepoV2Tool, + github_unstar_repo: githubUnstarRepoTool, + github_unstar_repo_v2: githubUnstarRepoV2Tool, + github_check_star: githubCheckStarTool, + github_check_star_v2: githubCheckStarV2Tool, + github_list_stargazers: githubListStargazersTool, + github_list_stargazers_v2: githubListStargazersV2Tool, + gitlab_list_projects: gitlabListProjectsTool, + gitlab_get_project: gitlabGetProjectTool, + gitlab_list_issues: gitlabListIssuesTool, + gitlab_get_issue: gitlabGetIssueTool, + gitlab_create_issue: gitlabCreateIssueTool, + gitlab_update_issue: gitlabUpdateIssueTool, + gitlab_delete_issue: gitlabDeleteIssueTool, + gitlab_create_issue_note: gitlabCreateIssueNoteTool, + gitlab_list_merge_requests: gitlabListMergeRequestsTool, + gitlab_get_merge_request: gitlabGetMergeRequestTool, + gitlab_create_merge_request: gitlabCreateMergeRequestTool, + gitlab_update_merge_request: gitlabUpdateMergeRequestTool, + gitlab_merge_merge_request: gitlabMergeMergeRequestTool, + gitlab_create_merge_request_note: gitlabCreateMergeRequestNoteTool, + gitlab_list_pipelines: gitlabListPipelinesTool, + gitlab_get_pipeline: gitlabGetPipelineTool, + gitlab_create_pipeline: gitlabCreatePipelineTool, + gitlab_retry_pipeline: gitlabRetryPipelineTool, + gitlab_cancel_pipeline: gitlabCancelPipelineTool, + grain_list_recordings: grainListRecordingsTool, + grain_get_recording: grainGetRecordingTool, + grain_get_transcript: grainGetTranscriptTool, + grain_list_teams: grainListTeamsTool, + grain_list_meeting_types: grainListMeetingTypesTool, + grain_create_hook: grainCreateHookTool, + grain_list_hooks: grainListHooksTool, + grain_delete_hook: grainDeleteHookTool, + greptile_query: greptileQueryTool, + greptile_search: greptileSearchTool, + greptile_index_repo: greptileIndexRepoTool, + greptile_status: greptileStatusTool, + elasticsearch_search: elasticsearchSearchTool, + elasticsearch_index_document: elasticsearchIndexDocumentTool, + elasticsearch_get_document: elasticsearchGetDocumentTool, + elasticsearch_update_document: elasticsearchUpdateDocumentTool, + elasticsearch_delete_document: elasticsearchDeleteDocumentTool, + elasticsearch_bulk: elasticsearchBulkTool, + elasticsearch_count: elasticsearchCountTool, + elasticsearch_create_index: elasticsearchCreateIndexTool, + elasticsearch_delete_index: elasticsearchDeleteIndexTool, + 
elasticsearch_get_index: elasticsearchGetIndexTool, + elasticsearch_list_indices: elasticsearchListIndicesTool, + elasticsearch_cluster_health: elasticsearchClusterHealthTool, + elasticsearch_cluster_stats: elasticsearchClusterStatsTool, + enrich_check_credits: enrichCheckCreditsTool, + enrich_company_funding: enrichCompanyFundingTool, + enrich_company_lookup: enrichCompanyLookupTool, + enrich_company_revenue: enrichCompanyRevenueTool, + enrich_disposable_email_check: enrichDisposableEmailCheckTool, + enrich_email_to_ip: enrichEmailToIpTool, + enrich_email_to_person_lite: enrichEmailToPersonLiteTool, + enrich_email_to_phone: enrichEmailToPhoneTool, + enrich_email_to_profile: enrichEmailToProfileTool, + enrich_find_email: enrichFindEmailTool, + enrich_get_post_details: enrichGetPostDetailsTool, + enrich_ip_to_company: enrichIpToCompanyTool, + enrich_linkedin_profile: enrichLinkedInProfileTool, + enrich_linkedin_to_personal_email: enrichLinkedInToPersonalEmailTool, + enrich_linkedin_to_work_email: enrichLinkedInToWorkEmailTool, + enrich_phone_finder: enrichPhoneFinderTool, + enrich_reverse_hash_lookup: enrichReverseHashLookupTool, + enrich_sales_pointer_people: enrichSalesPointerPeopleTool, + enrich_search_company: enrichSearchCompanyTool, + enrich_search_company_activities: enrichSearchCompanyActivitiesTool, + enrich_search_company_employees: enrichSearchCompanyEmployeesTool, + enrich_search_logo: enrichSearchLogoTool, + enrich_search_people: enrichSearchPeopleTool, + enrich_search_people_activities: enrichSearchPeopleActivitiesTool, + enrich_search_post_comments: enrichSearchPostCommentsTool, + enrich_search_post_reactions: enrichSearchPostReactionsTool, + enrich_search_posts: enrichSearchPostsTool, + enrich_search_similar_companies: enrichSearchSimilarCompaniesTool, + enrich_verify_email: enrichVerifyEmailTool, + exa_search: exaSearchTool, + exa_get_contents: exaGetContentsTool, + exa_find_similar_links: exaFindSimilarLinksTool, + exa_answer: exaAnswerTool, + exa_research: exaResearchTool, + incidentio_escalations_list: incidentioEscalationsListTool, + incidentio_escalations_create: incidentioEscalationsCreateTool, + incidentio_escalations_show: incidentioEscalationsShowTool, + incidentio_schedules_list: incidentioSchedulesListTool, + incidentio_schedules_create: incidentioSchedulesCreateTool, + incidentio_schedules_show: incidentioSchedulesShowTool, + incidentio_schedules_update: incidentioSchedulesUpdateTool, + incidentio_schedules_delete: incidentioSchedulesDeleteTool, + incidentio_custom_fields_create: incidentioCustomFieldsCreateTool, + incidentio_custom_fields_show: incidentioCustomFieldsShowTool, + incidentio_custom_fields_update: incidentioCustomFieldsUpdateTool, + incidentio_custom_fields_delete: incidentioCustomFieldsDeleteTool, + parallel_search: parallelSearchTool, + parallel_extract: parallelExtractTool, + parallel_deep_research: parallelDeepResearchTool, + reddit_hot_posts: redditHotPostsTool, + reddit_get_posts: redditGetPostsTool, + reddit_get_comments: redditGetCommentsTool, + reddit_get_controversial: redditGetControversialTool, + reddit_search: redditSearchTool, + reddit_submit_post: redditSubmitPostTool, + reddit_vote: redditVoteTool, + reddit_save: redditSaveTool, + reddit_unsave: redditUnsaveTool, + reddit_reply: redditReplyTool, + reddit_edit: redditEditTool, + reddit_delete: redditDeleteTool, + reddit_subscribe: redditSubscribeTool, + google_drive_copy: googleDriveCopyTool, + google_drive_create_folder: googleDriveCreateFolderTool, + google_drive_delete: 
googleDriveDeleteTool, + google_drive_download: googleDriveDownloadTool, + google_drive_get_about: googleDriveGetAboutTool, + google_drive_get_content: googleDriveGetContentTool, + google_drive_get_file: googleDriveGetFileTool, + google_drive_list: googleDriveListTool, + google_drive_list_permissions: googleDriveListPermissionsTool, + google_drive_share: googleDriveShareTool, + google_drive_trash: googleDriveTrashTool, + google_drive_unshare: googleDriveUnshareTool, + google_drive_untrash: googleDriveUntrashTool, + google_drive_update: googleDriveUpdateTool, + google_drive_upload: googleDriveUploadTool, + google_docs_read: googleDocsReadTool, + google_docs_write: googleDocsWriteTool, + google_docs_create: googleDocsCreateTool, + google_maps_air_quality: googleMapsAirQualityTool, + google_maps_directions: googleMapsDirectionsTool, + google_maps_distance_matrix: googleMapsDistanceMatrixTool, + google_maps_elevation: googleMapsElevationTool, + google_maps_geocode: googleMapsGeocodeTool, + google_maps_geolocate: googleMapsGeolocateTool, + google_maps_place_details: googleMapsPlaceDetailsTool, + google_maps_places_search: googleMapsPlacesSearchTool, + google_maps_reverse_geocode: googleMapsReverseGeocodeTool, + google_maps_snap_to_roads: googleMapsSnapToRoadsTool, + google_maps_speed_limits: googleMapsSpeedLimitsTool, + google_maps_timezone: googleMapsTimezoneTool, + google_maps_validate_address: googleMapsValidateAddressTool, + google_sheets_read: googleSheetsReadTool, + google_sheets_write: googleSheetsWriteTool, + google_sheets_update: googleSheetsUpdateTool, + google_sheets_append: googleSheetsAppendTool, + google_sheets_read_v2: googleSheetsReadV2Tool, + google_sheets_write_v2: googleSheetsWriteV2Tool, + google_sheets_update_v2: googleSheetsUpdateV2Tool, + google_sheets_append_v2: googleSheetsAppendV2Tool, + google_sheets_clear_v2: googleSheetsClearV2Tool, + google_sheets_get_spreadsheet_v2: googleSheetsGetSpreadsheetV2Tool, + google_sheets_create_spreadsheet_v2: googleSheetsCreateSpreadsheetV2Tool, + google_sheets_batch_get_v2: googleSheetsBatchGetV2Tool, + google_sheets_batch_update_v2: googleSheetsBatchUpdateV2Tool, + google_sheets_batch_clear_v2: googleSheetsBatchClearV2Tool, + google_sheets_copy_sheet_v2: googleSheetsCopySheetV2Tool, + google_slides_read: googleSlidesReadTool, + google_slides_write: googleSlidesWriteTool, + google_slides_create: googleSlidesCreateTool, + google_slides_replace_all_text: googleSlidesReplaceAllTextTool, + google_slides_add_slide: googleSlidesAddSlideTool, + google_slides_get_thumbnail: googleSlidesGetThumbnailTool, + google_slides_add_image: googleSlidesAddImageTool, + google_slides_get_page: googleSlidesGetPageTool, + google_slides_delete_object: googleSlidesDeleteObjectTool, + google_slides_duplicate_object: googleSlidesDuplicateObjectTool, + google_slides_update_slides_position: googleSlidesUpdateSlidesPositionTool, + google_slides_create_table: googleSlidesCreateTableTool, + google_slides_create_shape: googleSlidesCreateShapeTool, + google_slides_insert_text: googleSlidesInsertTextTool, + perplexity_chat: perplexityChatTool, + perplexity_search: perplexitySearchTool, + pulse_parser: pulseParserTool, + pulse_parser_v2: pulseParserV2Tool, + posthog_capture_event: posthogCaptureEventTool, + posthog_batch_events: posthogBatchEventsTool, + posthog_list_persons: posthogListPersonsTool, + posthog_get_person: posthogGetPersonTool, + posthog_delete_person: posthogDeletePersonTool, + posthog_query: posthogQueryTool, + posthog_list_insights: 
posthogListInsightsTool, + posthog_get_insight: posthogGetInsightTool, + posthog_create_insight: posthogCreateInsightTool, + posthog_list_dashboards: posthogListDashboardsTool, + posthog_get_dashboard: posthogGetDashboardTool, + posthog_list_actions: posthogListActionsTool, + posthog_list_cohorts: posthogListCohortsTool, + posthog_get_cohort: posthogGetCohortTool, + posthog_create_cohort: posthogCreateCohortTool, + posthog_list_annotations: posthogListAnnotationsTool, + posthog_create_annotation: posthogCreateAnnotationTool, + posthog_list_feature_flags: posthogListFeatureFlagsTool, + posthog_get_feature_flag: posthogGetFeatureFlagTool, + posthog_create_feature_flag: posthogCreateFeatureFlagTool, + posthog_update_feature_flag: posthogUpdateFeatureFlagTool, + posthog_delete_feature_flag: posthogDeleteFeatureFlagTool, + posthog_evaluate_flags: posthogEvaluateFlagsTool, + posthog_list_experiments: posthogListExperimentsTool, + posthog_get_experiment: posthogGetExperimentTool, + posthog_create_experiment: posthogCreateExperimentTool, + posthog_list_surveys: posthogListSurveysTool, + posthog_get_survey: posthogGetSurveyTool, + posthog_create_survey: posthogCreateSurveyTool, + posthog_update_survey: posthogUpdateSurveyTool, + posthog_list_session_recordings: posthogListSessionRecordingsTool, + posthog_get_session_recording: posthogGetSessionRecordingTool, + posthog_list_recording_playlists: posthogListRecordingPlaylistsTool, + posthog_list_event_definitions: posthogListEventDefinitionsTool, + posthog_get_event_definition: posthogGetEventDefinitionTool, + posthog_update_event_definition: posthogUpdateEventDefinitionTool, + posthog_list_property_definitions: posthogListPropertyDefinitionsTool, + posthog_get_property_definition: posthogGetPropertyDefinitionTool, + posthog_update_property_definition: posthogUpdatePropertyDefinitionTool, + posthog_list_projects: posthogListProjectsTool, + posthog_get_project: posthogGetProjectTool, + posthog_list_organizations: posthogListOrganizationsTool, + posthog_get_organization: posthogGetOrganizationTool, + confluence_retrieve: confluenceRetrieveTool, + confluence_update: confluenceUpdateTool, + confluence_create_page: confluenceCreatePageTool, + confluence_delete_page: confluenceDeletePageTool, + confluence_list_pages_in_space: confluenceListPagesInSpaceTool, + confluence_get_page_children: confluenceGetPageChildrenTool, + confluence_get_page_ancestors: confluenceGetPageAncestorsTool, + confluence_list_page_versions: confluenceListPageVersionsTool, + confluence_get_page_version: confluenceGetPageVersionTool, + confluence_list_page_properties: confluenceListPagePropertiesTool, + confluence_create_page_property: confluenceCreatePagePropertyTool, + confluence_list_blogposts: confluenceListBlogPostsTool, + confluence_get_blogpost: confluenceGetBlogPostTool, + confluence_create_blogpost: confluenceCreateBlogPostTool, + confluence_list_blogposts_in_space: confluenceListBlogPostsInSpaceTool, + confluence_search: confluenceSearchTool, + confluence_search_in_space: confluenceSearchInSpaceTool, + confluence_create_comment: confluenceCreateCommentTool, + confluence_list_comments: confluenceListCommentsTool, + confluence_update_comment: confluenceUpdateCommentTool, + confluence_delete_comment: confluenceDeleteCommentTool, + confluence_list_attachments: confluenceListAttachmentsTool, + confluence_upload_attachment: confluenceUploadAttachmentTool, + confluence_delete_attachment: confluenceDeleteAttachmentTool, + confluence_list_labels: confluenceListLabelsTool, + 
confluence_add_label: confluenceAddLabelTool, + confluence_get_space: confluenceGetSpaceTool, + confluence_list_spaces: confluenceListSpacesTool, + cursor_list_agents: cursorListAgentsTool, + cursor_list_agents_v2: cursorListAgentsV2Tool, + cursor_get_agent: cursorGetAgentTool, + cursor_get_agent_v2: cursorGetAgentV2Tool, + cursor_get_conversation: cursorGetConversationTool, + cursor_get_conversation_v2: cursorGetConversationV2Tool, + cursor_launch_agent: cursorLaunchAgentTool, + cursor_launch_agent_v2: cursorLaunchAgentV2Tool, + cursor_add_followup: cursorAddFollowupTool, + cursor_add_followup_v2: cursorAddFollowupV2Tool, + cursor_stop_agent: cursorStopAgentTool, + cursor_stop_agent_v2: cursorStopAgentV2Tool, + cursor_delete_agent: cursorDeleteAgentTool, + cursor_delete_agent_v2: cursorDeleteAgentV2Tool, + trello_list_lists: trelloListListsTool, + trello_list_cards: trelloListCardsTool, + trello_create_card: trelloCreateCardTool, + trello_update_card: trelloUpdateCardTool, + trello_get_actions: trelloGetActionsTool, + trello_add_comment: trelloAddCommentTool, + twilio_send_sms: sendSMSTool, + twilio_voice_make_call: makeCallTool, + twilio_voice_list_calls: listCallsTool, + twilio_voice_get_recording: getRecordingTool, + airtable_create_records: airtableCreateRecordsTool, + airtable_get_record: airtableGetRecordTool, + airtable_list_records: airtableListRecordsTool, + airtable_update_record: airtableUpdateRecordTool, + ahrefs_domain_rating: ahrefsDomainRatingTool, + ahrefs_backlinks: ahrefsBacklinksTool, + ahrefs_backlinks_stats: ahrefsBacklinksStatsTool, + ahrefs_referring_domains: ahrefsReferringDomainsTool, + ahrefs_organic_keywords: ahrefsOrganicKeywordsTool, + ahrefs_top_pages: ahrefsTopPagesTool, + ahrefs_keyword_overview: ahrefsKeywordOverviewTool, + ahrefs_broken_backlinks: ahrefsBrokenBacklinksTool, + apify_run_actor_sync: apifyRunActorSyncTool, + apify_run_actor_async: apifyRunActorAsyncTool, + apollo_people_search: apolloPeopleSearchTool, + apollo_people_enrich: apolloPeopleEnrichTool, + apollo_people_bulk_enrich: apolloPeopleBulkEnrichTool, + apollo_organization_search: apolloOrganizationSearchTool, + apollo_organization_enrich: apolloOrganizationEnrichTool, + apollo_organization_bulk_enrich: apolloOrganizationBulkEnrichTool, + apollo_contact_create: apolloContactCreateTool, + apollo_contact_update: apolloContactUpdateTool, + apollo_contact_search: apolloContactSearchTool, + apollo_contact_bulk_create: apolloContactBulkCreateTool, + apollo_contact_bulk_update: apolloContactBulkUpdateTool, + apollo_account_create: apolloAccountCreateTool, + apollo_account_update: apolloAccountUpdateTool, + apollo_account_search: apolloAccountSearchTool, + apollo_account_bulk_create: apolloAccountBulkCreateTool, + apollo_account_bulk_update: apolloAccountBulkUpdateTool, + apollo_opportunity_create: apolloOpportunityCreateTool, + apollo_opportunity_search: apolloOpportunitySearchTool, + apollo_opportunity_get: apolloOpportunityGetTool, + apollo_opportunity_update: apolloOpportunityUpdateTool, + apollo_sequence_add_contacts: apolloSequenceAddContactsTool, + apollo_sequence_search: apolloSequenceSearchTool, + apollo_task_create: apolloTaskCreateTool, + apollo_task_search: apolloTaskSearchTool, + apollo_email_accounts: apolloEmailAccountsTool, + mistral_parser: mistralParserTool, + mistral_parser_v2: mistralParserV2Tool, + mistral_parser_v3: mistralParserV3Tool, + reducto_parser: reductoParserTool, + reducto_parser_v2: reductoParserV2Tool, + textract_parser: textractParserTool, + textract_parser_v2: 
textractParserV2Tool, + thinking_tool: thinkingTool, + tinybird_events: tinybirdEventsTool, + tinybird_query: tinybirdQueryTool, + stagehand_extract: stagehandExtractTool, + stagehand_agent: stagehandAgentTool, + mem0_add_memories: mem0AddMemoriesTool, + mem0_search_memories: mem0SearchMemoriesTool, + mem0_get_memories: mem0GetMemoriesTool, + zep_create_thread: zepCreateThreadTool, + zep_get_threads: zepGetThreadsTool, + zep_delete_thread: zepDeleteThreadTool, + zep_get_context: zepGetContextTool, + zep_get_messages: zepGetMessagesTool, + zep_add_messages: zepAddMessagesTool, + zep_add_user: zepAddUserTool, + zep_get_user: zepGetUserTool, + zep_get_user_threads: zepGetUserThreadsTool, + memory_add: memoryAddTool, + memory_get: memoryGetTool, + memory_get_all: memoryGetAllTool, + memory_delete: memoryDeleteTool, + knowledge_search: knowledgeSearchTool, + knowledge_upload_chunk: knowledgeUploadChunkTool, + knowledge_create_document: knowledgeCreateDocumentTool, + search_tool: searchTool, + elevenlabs_tts: elevenLabsTtsTool, + stt_whisper: whisperSttTool, + stt_whisper_v2: whisperSttV2Tool, + stt_deepgram: deepgramSttTool, + stt_deepgram_v2: deepgramSttV2Tool, + stt_elevenlabs: elevenLabsSttTool, + stt_elevenlabs_v2: elevenLabsSttV2Tool, + stt_assemblyai: assemblyaiSttTool, + stt_assemblyai_v2: assemblyaiSttV2Tool, + stt_gemini: geminiSttTool, + stt_gemini_v2: geminiSttV2Tool, + tts_openai: openaiTtsTool, + tts_deepgram: deepgramTtsTool, + tts_elevenlabs: elevenLabsTtsUnifiedTool, + tts_cartesia: cartesiaTtsTool, + tts_google: googleTtsTool, + tts_azure: azureTtsTool, + tts_playht: playhtTtsTool, + video_runway: runwayVideoTool, + video_veo: veoVideoTool, + video_luma: lumaVideoTool, + video_minimax: minimaxVideoTool, + video_falai: falaiVideoTool, + s3_get_object: s3GetObjectTool, + s3_put_object: s3PutObjectTool, + s3_list_objects: s3ListObjectsTool, + s3_delete_object: s3DeleteObjectTool, + s3_copy_object: s3CopyObjectTool, + telegram_message: telegramMessageTool, + telegram_delete_message: telegramDeleteMessageTool, + telegram_send_audio: telegramSendAudioTool, + telegram_send_animation: telegramSendAnimationTool, + telegram_send_photo: telegramSendPhotoTool, + telegram_send_video: telegramSendVideoTool, + telegram_send_document: telegramSendDocumentTool, + clay_populate: clayPopulateTool, + clerk_list_users: clerkListUsersTool, + clerk_get_user: clerkGetUserTool, + clerk_create_user: clerkCreateUserTool, + clerk_update_user: clerkUpdateUserTool, + clerk_delete_user: clerkDeleteUserTool, + clerk_list_organizations: clerkListOrganizationsTool, + clerk_get_organization: clerkGetOrganizationTool, + clerk_create_organization: clerkCreateOrganizationTool, + clerk_list_sessions: clerkListSessionsTool, + clerk_get_session: clerkGetSessionTool, + clerk_revoke_session: clerkRevokeSessionTool, + discord_send_message: discordSendMessageTool, + discord_get_messages: discordGetMessagesTool, + discord_get_server: discordGetServerTool, + discord_get_user: discordGetUserTool, + discord_edit_message: discordEditMessageTool, + discord_delete_message: discordDeleteMessageTool, + discord_add_reaction: discordAddReactionTool, + discord_remove_reaction: discordRemoveReactionTool, + discord_pin_message: discordPinMessageTool, + discord_unpin_message: discordUnpinMessageTool, + discord_create_thread: discordCreateThreadTool, + discord_join_thread: discordJoinThreadTool, + discord_leave_thread: discordLeaveThreadTool, + discord_archive_thread: discordArchiveThreadTool, + discord_create_channel: 
discordCreateChannelTool, + discord_update_channel: discordUpdateChannelTool, + discord_delete_channel: discordDeleteChannelTool, + discord_get_channel: discordGetChannelTool, + discord_create_role: discordCreateRoleTool, + discord_update_role: discordUpdateRoleTool, + discord_delete_role: discordDeleteRoleTool, + discord_assign_role: discordAssignRoleTool, + discord_remove_role: discordRemoveRoleTool, + discord_kick_member: discordKickMemberTool, + discord_ban_member: discordBanMemberTool, + discord_unban_member: discordUnbanMemberTool, + discord_get_member: discordGetMemberTool, + discord_update_member: discordUpdateMemberTool, + discord_create_invite: discordCreateInviteTool, + discord_get_invite: discordGetInviteTool, + discord_delete_invite: discordDeleteInviteTool, + discord_create_webhook: discordCreateWebhookTool, + discord_execute_webhook: discordExecuteWebhookTool, + discord_get_webhook: discordGetWebhookTool, + discord_delete_webhook: discordDeleteWebhookTool, + datadog_submit_metrics: datadogSubmitMetricsTool, + datadog_query_timeseries: datadogQueryTimeseriesTool, + datadog_create_event: datadogCreateEventTool, + datadog_create_monitor: datadogCreateMonitorTool, + datadog_get_monitor: datadogGetMonitorTool, + datadog_list_monitors: datadogListMonitorsTool, + datadog_mute_monitor: datadogMuteMonitorTool, + datadog_query_logs: datadogQueryLogsTool, + datadog_send_logs: datadogSendLogsTool, + datadog_create_downtime: datadogCreateDowntimeTool, + datadog_list_downtimes: datadogListDowntimesTool, + datadog_cancel_downtime: datadogCancelDowntimeTool, + openai_image: openAIImageTool, + microsoft_teams_read_chat: microsoftTeamsReadChatTool, + microsoft_teams_write_chat: microsoftTeamsWriteChatTool, + microsoft_teams_read_channel: microsoftTeamsReadChannelTool, + microsoft_teams_write_channel: microsoftTeamsWriteChannelTool, + microsoft_teams_update_chat_message: microsoftTeamsUpdateChatMessageTool, + microsoft_teams_update_channel_message: microsoftTeamsUpdateChannelMessageTool, + microsoft_teams_delete_chat_message: microsoftTeamsDeleteChatMessageTool, + microsoft_teams_delete_channel_message: microsoftTeamsDeleteChannelMessageTool, + microsoft_teams_reply_to_message: microsoftTeamsReplyToMessageTool, + microsoft_teams_set_reaction: microsoftTeamsSetReactionTool, + microsoft_teams_unset_reaction: microsoftTeamsUnsetReactionTool, + microsoft_teams_get_message: microsoftTeamsGetMessageTool, + microsoft_teams_list_team_members: microsoftTeamsListTeamMembersTool, + microsoft_teams_list_channel_members: microsoftTeamsListChannelMembersTool, + outlook_read: outlookReadTool, + outlook_send: outlookSendTool, + outlook_draft: outlookDraftTool, + outlook_forward: outlookForwardTool, + outlook_move: outlookMoveTool, + outlook_mark_read: outlookMarkReadTool, + outlook_mark_unread: outlookMarkUnreadTool, + outlook_delete: outlookDeleteTool, + outlook_copy: outlookCopyTool, + linear_read_issues: linearReadIssuesTool, + linear_create_issue: linearCreateIssueTool, + linear_get_issue: linearGetIssueTool, + linear_update_issue: linearUpdateIssueTool, + linear_archive_issue: linearArchiveIssueTool, + linear_unarchive_issue: linearUnarchiveIssueTool, + linear_delete_issue: linearDeleteIssueTool, + linear_add_label_to_issue: linearAddLabelToIssueTool, + linear_remove_label_from_issue: linearRemoveLabelFromIssueTool, + linear_search_issues: linearSearchIssuesTool, + linear_create_comment: linearCreateCommentTool, + linear_update_comment: linearUpdateCommentTool, + linear_delete_comment: 
linearDeleteCommentTool, + linear_list_comments: linearListCommentsTool, + linear_list_projects: linearListProjectsTool, + linear_get_project: linearGetProjectTool, + linear_create_project: linearCreateProjectTool, + linear_update_project: linearUpdateProjectTool, + linear_archive_project: linearArchiveProjectTool, + linear_list_users: linearListUsersTool, + linear_list_teams: linearListTeamsTool, + linear_get_viewer: linearGetViewerTool, + linear_list_labels: linearListLabelsTool, + linear_create_label: linearCreateLabelTool, + linear_update_label: linearUpdateLabelTool, + linear_archive_label: linearArchiveLabelTool, + linear_list_workflow_states: linearListWorkflowStatesTool, + linear_create_workflow_state: linearCreateWorkflowStateTool, + linear_update_workflow_state: linearUpdateWorkflowStateTool, + linear_list_cycles: linearListCyclesTool, + linear_get_cycle: linearGetCycleTool, + linear_create_cycle: linearCreateCycleTool, + linear_get_active_cycle: linearGetActiveCycleTool, + linear_create_attachment: linearCreateAttachmentTool, + linear_list_attachments: linearListAttachmentsTool, + linear_update_attachment: linearUpdateAttachmentTool, + linear_delete_attachment: linearDeleteAttachmentTool, + linear_create_issue_relation: linearCreateIssueRelationTool, + linear_list_issue_relations: linearListIssueRelationsTool, + linear_delete_issue_relation: linearDeleteIssueRelationTool, + linear_create_favorite: linearCreateFavoriteTool, + linear_list_favorites: linearListFavoritesTool, + linear_create_project_update: linearCreateProjectUpdateTool, + linear_list_project_updates: linearListProjectUpdatesTool, + linear_list_notifications: linearListNotificationsTool, + linear_update_notification: linearUpdateNotificationTool, + linear_create_customer: linearCreateCustomerTool, + linear_list_customers: linearListCustomersTool, + linear_create_customer_request: linearCreateCustomerRequestTool, + linear_update_customer_request: linearUpdateCustomerRequestTool, + linear_list_customer_requests: linearListCustomerRequestsTool, + linear_get_customer: linearGetCustomerTool, + linear_update_customer: linearUpdateCustomerTool, + linear_delete_customer: linearDeleteCustomerTool, + linear_merge_customers: linearMergeCustomersTool, + linear_create_customer_status: linearCreateCustomerStatusTool, + linear_update_customer_status: linearUpdateCustomerStatusTool, + linear_delete_customer_status: linearDeleteCustomerStatusTool, + linear_list_customer_statuses: linearListCustomerStatusesTool, + linear_create_customer_tier: linearCreateCustomerTierTool, + linear_update_customer_tier: linearUpdateCustomerTierTool, + linear_delete_customer_tier: linearDeleteCustomerTierTool, + linear_list_customer_tiers: linearListCustomerTiersTool, + linear_delete_project: linearDeleteProjectTool, + linear_create_project_label: linearCreateProjectLabelTool, + linear_update_project_label: linearUpdateProjectLabelTool, + linear_delete_project_label: linearDeleteProjectLabelTool, + linear_list_project_labels: linearListProjectLabelsTool, + linear_add_label_to_project: linearAddLabelToProjectTool, + linear_remove_label_from_project: linearRemoveLabelFromProjectTool, + linear_create_project_milestone: linearCreateProjectMilestoneTool, + linear_update_project_milestone: linearUpdateProjectMilestoneTool, + linear_delete_project_milestone: linearDeleteProjectMilestoneTool, + linear_list_project_milestones: linearListProjectMilestonesTool, + linear_create_project_status: linearCreateProjectStatusTool, + linear_update_project_status: 
linearUpdateProjectStatusTool, + linear_delete_project_status: linearDeleteProjectStatusTool, + linear_list_project_statuses: linearListProjectStatusesTool, + langsmith_create_run: langsmithCreateRunTool, + langsmith_create_runs_batch: langsmithCreateRunsBatchTool, + lemlist_get_activities: lemlistGetActivitiesTool, + lemlist_get_lead: lemlistGetLeadTool, + lemlist_send_email: lemlistSendEmailTool, + shopify_create_product: shopifyCreateProductTool, + shopify_get_product: shopifyGetProductTool, + shopify_list_products: shopifyListProductsTool, + shopify_update_product: shopifyUpdateProductTool, + shopify_delete_product: shopifyDeleteProductTool, + shopify_get_order: shopifyGetOrderTool, + shopify_list_orders: shopifyListOrdersTool, + shopify_update_order: shopifyUpdateOrderTool, + shopify_cancel_order: shopifyCancelOrderTool, + shopify_create_customer: shopifyCreateCustomerTool, + shopify_get_customer: shopifyGetCustomerTool, + shopify_list_customers: shopifyListCustomersTool, + shopify_update_customer: shopifyUpdateCustomerTool, + shopify_delete_customer: shopifyDeleteCustomerTool, + shopify_get_inventory_level: shopifyGetInventoryLevelTool, + shopify_adjust_inventory: shopifyAdjustInventoryTool, + shopify_list_inventory_items: shopifyListInventoryItemsTool, + shopify_list_locations: shopifyListLocationsTool, + shopify_create_fulfillment: shopifyCreateFulfillmentTool, + shopify_list_collections: shopifyListCollectionsTool, + shopify_get_collection: shopifyGetCollectionTool, + onedrive_create_folder: onedriveCreateFolderTool, + onedrive_delete: onedriveDeleteTool, + onedrive_download: onedriveDownloadTool, + onedrive_list: onedriveListTool, + onedrive_upload: onedriveUploadTool, + microsoft_excel_read: microsoftExcelReadTool, + microsoft_excel_write: microsoftExcelWriteTool, + microsoft_excel_table_add: microsoftExcelTableAddTool, + microsoft_excel_worksheet_add: microsoftExcelWorksheetAddTool, + microsoft_excel_read_v2: microsoftExcelReadV2Tool, + microsoft_excel_write_v2: microsoftExcelWriteV2Tool, + microsoft_planner_create_task: microsoftPlannerCreateTaskTool, + microsoft_planner_read_task: microsoftPlannerReadTaskTool, + microsoft_planner_update_task: microsoftPlannerUpdateTaskTool, + microsoft_planner_delete_task: microsoftPlannerDeleteTaskTool, + microsoft_planner_list_plans: microsoftPlannerListPlansTool, + microsoft_planner_read_plan: microsoftPlannerReadPlanTool, + microsoft_planner_list_buckets: microsoftPlannerListBucketsTool, + microsoft_planner_read_bucket: microsoftPlannerReadBucketTool, + microsoft_planner_create_bucket: microsoftPlannerCreateBucketTool, + microsoft_planner_update_bucket: microsoftPlannerUpdateBucketTool, + microsoft_planner_delete_bucket: microsoftPlannerDeleteBucketTool, + microsoft_planner_get_task_details: microsoftPlannerGetTaskDetailsTool, + microsoft_planner_update_task_details: microsoftPlannerUpdateTaskDetailsTool, + google_calendar_create: googleCalendarCreateTool, + google_calendar_create_v2: googleCalendarCreateV2Tool, + google_calendar_delete: googleCalendarDeleteTool, + google_calendar_delete_v2: googleCalendarDeleteV2Tool, + google_calendar_get: googleCalendarGetTool, + google_calendar_get_v2: googleCalendarGetV2Tool, + google_calendar_instances: googleCalendarInstancesTool, + google_calendar_instances_v2: googleCalendarInstancesV2Tool, + google_calendar_invite: googleCalendarInviteTool, + google_calendar_invite_v2: googleCalendarInviteV2Tool, + google_calendar_list: googleCalendarListTool, + google_calendar_list_v2: googleCalendarListV2Tool, 
+ google_calendar_list_calendars: googleCalendarListCalendarsTool, + google_calendar_list_calendars_v2: googleCalendarListCalendarsV2Tool, + google_calendar_move: googleCalendarMoveTool, + google_calendar_move_v2: googleCalendarMoveV2Tool, + google_calendar_quick_add: googleCalendarQuickAddTool, + google_calendar_quick_add_v2: googleCalendarQuickAddV2Tool, + google_calendar_update: googleCalendarUpdateTool, + google_calendar_update_v2: googleCalendarUpdateV2Tool, + google_forms_get_responses: googleFormsGetResponsesTool, + google_forms_get_form: googleFormsGetFormTool, + google_forms_create_form: googleFormsCreateFormTool, + google_forms_batch_update: googleFormsBatchUpdateTool, + google_forms_set_publish_settings: googleFormsSetPublishSettingsTool, + google_forms_create_watch: googleFormsCreateWatchTool, + google_forms_list_watches: googleFormsListWatchesTool, + google_forms_delete_watch: googleFormsDeleteWatchTool, + google_forms_renew_watch: googleFormsRenewWatchTool, + workflow_executor: workflowExecutorTool, + wealthbox_read_contact: wealthboxReadContactTool, + wealthbox_write_contact: wealthboxWriteContactTool, + wealthbox_read_task: wealthboxReadTaskTool, + wealthbox_write_task: wealthboxWriteTaskTool, + wealthbox_read_note: wealthboxReadNoteTool, + wealthbox_write_note: wealthboxWriteNoteTool, + webflow_list_items: webflowListItemsTool, + webflow_get_item: webflowGetItemTool, + webflow_create_item: webflowCreateItemTool, + webflow_update_item: webflowUpdateItemTool, + webflow_delete_item: webflowDeleteItemTool, + wikipedia_summary: wikipediaPageSummaryTool, + wikipedia_search: wikipediaSearchTool, + wikipedia_content: wikipediaPageContentTool, + wikipedia_random: wikipediaRandomPageTool, + wordpress_create_post: wordpressCreatePostTool, + wordpress_update_post: wordpressUpdatePostTool, + wordpress_delete_post: wordpressDeletePostTool, + wordpress_get_post: wordpressGetPostTool, + wordpress_list_posts: wordpressListPostsTool, + wordpress_create_page: wordpressCreatePageTool, + wordpress_update_page: wordpressUpdatePageTool, + wordpress_delete_page: wordpressDeletePageTool, + wordpress_get_page: wordpressGetPageTool, + wordpress_list_pages: wordpressListPagesTool, + wordpress_upload_media: wordpressUploadMediaTool, + wordpress_get_media: wordpressGetMediaTool, + wordpress_list_media: wordpressListMediaTool, + wordpress_delete_media: wordpressDeleteMediaTool, + wordpress_create_comment: wordpressCreateCommentTool, + wordpress_list_comments: wordpressListCommentsTool, + wordpress_update_comment: wordpressUpdateCommentTool, + wordpress_delete_comment: wordpressDeleteCommentTool, + wordpress_create_category: wordpressCreateCategoryTool, + wordpress_list_categories: wordpressListCategoriesTool, + wordpress_create_tag: wordpressCreateTagTool, + wordpress_list_tags: wordpressListTagsTool, + wordpress_get_current_user: wordpressGetCurrentUserTool, + wordpress_list_users: wordpressListUsersTool, + wordpress_get_user: wordpressGetUserTool, + wordpress_search_content: wordpressSearchContentTool, + google_vault_create_matters_export: createMattersExportTool, + google_vault_list_matters_export: listMattersExportTool, + google_vault_create_matters_holds: createMattersHoldsTool, + google_vault_list_matters_holds: listMattersHoldsTool, + google_vault_create_matters: createMattersTool, + google_vault_list_matters: listMattersTool, + google_vault_download_export_file: downloadExportFileTool, + google_groups_add_alias: googleGroupsAddAliasTool, + google_groups_add_member: googleGroupsAddMemberTool, + 
google_groups_create_group: googleGroupsCreateGroupTool, + google_groups_delete_group: googleGroupsDeleteGroupTool, + google_groups_get_group: googleGroupsGetGroupTool, + google_groups_get_member: googleGroupsGetMemberTool, + google_groups_get_settings: googleGroupsGetSettingsTool, + google_groups_has_member: googleGroupsHasMemberTool, + google_groups_list_aliases: googleGroupsListAliasesTool, + google_groups_list_groups: googleGroupsListGroupsTool, + google_groups_list_members: googleGroupsListMembersTool, + google_groups_remove_alias: googleGroupsRemoveAliasTool, + google_groups_remove_member: googleGroupsRemoveMemberTool, + google_groups_update_group: googleGroupsUpdateGroupTool, + google_groups_update_member: googleGroupsUpdateMemberTool, + google_groups_update_settings: googleGroupsUpdateSettingsTool, + qdrant_fetch_points: qdrantFetchTool, + qdrant_search_vector: qdrantSearchTool, + qdrant_upsert_points: qdrantUpsertTool, + hunter_discover: hunterDiscoverTool, + hunter_domain_search: hunterDomainSearchTool, + hunter_email_finder: hunterEmailFinderTool, + hunter_email_verifier: hunterEmailVerifierTool, + hunter_companies_find: hunterCompaniesFindTool, + hunter_email_count: hunterEmailCountTool, + incidentio_incidents_list: incidentioIncidentsListTool, + incidentio_incidents_create: incidentioIncidentsCreateTool, + incidentio_incidents_show: incidentioIncidentsShowTool, + incidentio_incidents_update: incidentioIncidentsUpdateTool, + incidentio_actions_list: incidentioActionsListTool, + incidentio_actions_show: incidentioActionsShowTool, + incidentio_follow_ups_list: incidentioFollowUpsListTool, + incidentio_follow_ups_show: incidentioFollowUpsShowTool, + incidentio_workflows_list: incidentioWorkflowsListTool, + incidentio_workflows_create: incidentioWorkflowsCreateTool, + incidentio_workflows_show: incidentioWorkflowsShowTool, + incidentio_workflows_update: incidentioWorkflowsUpdateTool, + incidentio_workflows_delete: incidentioWorkflowsDeleteTool, + incidentio_custom_fields_list: incidentioCustomFieldsListTool, + incidentio_users_list: incidentioUsersListTool, + incidentio_users_show: incidentioUsersShowTool, + incidentio_severities_list: incidentioSeveritiesListTool, + incidentio_incident_statuses_list: incidentioIncidentStatusesListTool, + incidentio_incident_types_list: incidentioIncidentTypesListTool, + incidentio_incident_roles_list: incidentioIncidentRolesListTool, + incidentio_incident_roles_create: incidentioIncidentRolesCreateTool, + incidentio_incident_roles_show: incidentioIncidentRolesShowTool, + incidentio_incident_roles_update: incidentioIncidentRolesUpdateTool, + incidentio_incident_roles_delete: incidentioIncidentRolesDeleteTool, + incidentio_incident_timestamps_list: incidentioIncidentTimestampsListTool, + incidentio_incident_timestamps_show: incidentioIncidentTimestampsShowTool, + incidentio_incident_updates_list: incidentioIncidentUpdatesListTool, + incidentio_schedule_entries_list: incidentioScheduleEntriesListTool, + incidentio_schedule_overrides_create: incidentioScheduleOverridesCreateTool, + incidentio_escalation_paths_create: incidentioEscalationPathsCreateTool, + incidentio_escalation_paths_show: incidentioEscalationPathsShowTool, + incidentio_escalation_paths_update: incidentioEscalationPathsUpdateTool, + incidentio_escalation_paths_delete: incidentioEscalationPathsDeleteTool, + hubspot_create_company: hubspotCreateCompanyTool, + hubspot_create_contact: hubspotCreateContactTool, + hubspot_get_company: hubspotGetCompanyTool, + hubspot_get_contact: 
hubspotGetContactTool, + hubspot_get_users: hubspotGetUsersTool, + hubspot_list_companies: hubspotListCompaniesTool, + hubspot_list_contacts: hubspotListContactsTool, + hubspot_list_deals: hubspotListDealsTool, + hubspot_search_companies: hubspotSearchCompaniesTool, + hubspot_search_contacts: hubspotSearchContactsTool, + hubspot_update_company: hubspotUpdateCompanyTool, + hubspot_update_contact: hubspotUpdateContactTool, + sharepoint_create_page: sharepointCreatePageTool, + sharepoint_read_page: sharepointReadPageTool, + sharepoint_list_sites: sharepointListSitesTool, + sharepoint_get_list: sharepointGetListTool, + sharepoint_create_list: sharepointCreateListTool, + sharepoint_update_list: sharepointUpdateListItemTool, + sharepoint_add_list_items: sharepointAddListItemTool, + sharepoint_upload_file: sharepointUploadFileTool, + stripe_create_payment_intent: stripeCreatePaymentIntentTool, + stripe_retrieve_payment_intent: stripeRetrievePaymentIntentTool, + stripe_update_payment_intent: stripeUpdatePaymentIntentTool, + stripe_confirm_payment_intent: stripeConfirmPaymentIntentTool, + stripe_capture_payment_intent: stripeCapturePaymentIntentTool, + stripe_cancel_payment_intent: stripeCancelPaymentIntentTool, + stripe_list_payment_intents: stripeListPaymentIntentsTool, + stripe_search_payment_intents: stripeSearchPaymentIntentsTool, + stripe_create_customer: stripeCreateCustomerTool, + stripe_retrieve_customer: stripeRetrieveCustomerTool, + stripe_update_customer: stripeUpdateCustomerTool, + stripe_delete_customer: stripeDeleteCustomerTool, + stripe_list_customers: stripeListCustomersTool, + stripe_search_customers: stripeSearchCustomersTool, + stripe_create_subscription: stripeCreateSubscriptionTool, + stripe_retrieve_subscription: stripeRetrieveSubscriptionTool, + stripe_update_subscription: stripeUpdateSubscriptionTool, + stripe_cancel_subscription: stripeCancelSubscriptionTool, + stripe_resume_subscription: stripeResumeSubscriptionTool, + stripe_list_subscriptions: stripeListSubscriptionsTool, + stripe_search_subscriptions: stripeSearchSubscriptionsTool, + stripe_create_invoice: stripeCreateInvoiceTool, + stripe_retrieve_invoice: stripeRetrieveInvoiceTool, + stripe_update_invoice: stripeUpdateInvoiceTool, + stripe_delete_invoice: stripeDeleteInvoiceTool, + stripe_finalize_invoice: stripeFinalizeInvoiceTool, + stripe_pay_invoice: stripePayInvoiceTool, + stripe_void_invoice: stripeVoidInvoiceTool, + stripe_send_invoice: stripeSendInvoiceTool, + stripe_list_invoices: stripeListInvoicesTool, + stripe_search_invoices: stripeSearchInvoicesTool, + stripe_create_charge: stripeCreateChargeTool, + stripe_retrieve_charge: stripeRetrieveChargeTool, + stripe_update_charge: stripeUpdateChargeTool, + stripe_capture_charge: stripeCaptureChargeTool, + stripe_list_charges: stripeListChargesTool, + stripe_search_charges: stripeSearchChargesTool, + stripe_create_product: stripeCreateProductTool, + stripe_retrieve_product: stripeRetrieveProductTool, + stripe_update_product: stripeUpdateProductTool, + stripe_delete_product: stripeDeleteProductTool, + stripe_list_products: stripeListProductsTool, + stripe_search_products: stripeSearchProductsTool, + stripe_create_price: stripeCreatePriceTool, + stripe_retrieve_price: stripeRetrievePriceTool, + stripe_update_price: stripeUpdatePriceTool, + stripe_list_prices: stripeListPricesTool, + stripe_search_prices: stripeSearchPricesTool, + stripe_retrieve_event: stripeRetrieveEventTool, + stripe_list_events: stripeListEventsTool, + salesforce_get_accounts: 
salesforceGetAccountsTool, + salesforce_create_account: salesforceCreateAccountTool, + salesforce_update_account: salesforceUpdateAccountTool, + salesforce_delete_account: salesforceDeleteAccountTool, + salesforce_get_contacts: salesforceGetContactsTool, + salesforce_create_contact: salesforceCreateContactTool, + salesforce_update_contact: salesforceUpdateContactTool, + salesforce_delete_contact: salesforceDeleteContactTool, + salesforce_get_leads: salesforceGetLeadsTool, + salesforce_create_lead: salesforceCreateLeadTool, + salesforce_update_lead: salesforceUpdateLeadTool, + salesforce_delete_lead: salesforceDeleteLeadTool, + salesforce_get_opportunities: salesforceGetOpportunitiesTool, + salesforce_create_opportunity: salesforceCreateOpportunityTool, + salesforce_update_opportunity: salesforceUpdateOpportunityTool, + salesforce_delete_opportunity: salesforceDeleteOpportunityTool, + salesforce_get_cases: salesforceGetCasesTool, + salesforce_create_case: salesforceCreateCaseTool, + salesforce_update_case: salesforceUpdateCaseTool, + salesforce_delete_case: salesforceDeleteCaseTool, + salesforce_get_tasks: salesforceGetTasksTool, + salesforce_create_task: salesforceCreateTaskTool, + salesforce_update_task: salesforceUpdateTaskTool, + salesforce_delete_task: salesforceDeleteTaskTool, + salesforce_list_reports: salesforceListReportsTool, + salesforce_get_report: salesforceGetReportTool, + salesforce_run_report: salesforceRunReportTool, + salesforce_list_report_types: salesforceListReportTypesTool, + salesforce_list_dashboards: salesforceListDashboardsTool, + salesforce_get_dashboard: salesforceGetDashboardTool, + salesforce_refresh_dashboard: salesforceRefreshDashboardTool, + salesforce_query: salesforceQueryTool, + salesforce_query_more: salesforceQueryMoreTool, + salesforce_describe_object: salesforceDescribeObjectTool, + salesforce_list_objects: salesforceListObjectsTool, + sqs_send: sqsSendTool, + mailchimp_get_audiences: mailchimpGetAudiencesTool, + mailchimp_get_audience: mailchimpGetAudienceTool, + mailchimp_create_audience: mailchimpCreateAudienceTool, + mailchimp_update_audience: mailchimpUpdateAudienceTool, + mailchimp_delete_audience: mailchimpDeleteAudienceTool, + mailchimp_get_members: mailchimpGetMembersTool, + mailchimp_get_member: mailchimpGetMemberTool, + mailchimp_add_member: mailchimpAddMemberTool, + mailchimp_add_or_update_member: mailchimpAddOrUpdateMemberTool, + mailchimp_update_member: mailchimpUpdateMemberTool, + mailchimp_delete_member: mailchimpDeleteMemberTool, + mailchimp_archive_member: mailchimpArchiveMemberTool, + mailchimp_unarchive_member: mailchimpUnarchiveMemberTool, + mailchimp_get_campaigns: mailchimpGetCampaignsTool, + mailchimp_get_campaign: mailchimpGetCampaignTool, + mailchimp_create_campaign: mailchimpCreateCampaignTool, + mailchimp_update_campaign: mailchimpUpdateCampaignTool, + mailchimp_delete_campaign: mailchimpDeleteCampaignTool, + mailchimp_send_campaign: mailchimpSendCampaignTool, + mailchimp_schedule_campaign: mailchimpScheduleCampaignTool, + mailchimp_unschedule_campaign: mailchimpUnscheduleCampaignTool, + mailchimp_replicate_campaign: mailchimpReplicateCampaignTool, + mailchimp_get_campaign_content: mailchimpGetCampaignContentTool, + mailchimp_set_campaign_content: mailchimpSetCampaignContentTool, + mailchimp_get_automations: mailchimpGetAutomationsTool, + mailchimp_get_automation: mailchimpGetAutomationTool, + mailchimp_start_automation: mailchimpStartAutomationTool, + mailchimp_pause_automation: mailchimpPauseAutomationTool, + 
mailchimp_add_subscriber_to_automation: mailchimpAddSubscriberToAutomationTool, + mailchimp_get_templates: mailchimpGetTemplatesTool, + mailchimp_get_template: mailchimpGetTemplateTool, + mailchimp_create_template: mailchimpCreateTemplateTool, + mailchimp_update_template: mailchimpUpdateTemplateTool, + mailchimp_delete_template: mailchimpDeleteTemplateTool, + mailchimp_get_campaign_reports: mailchimpGetCampaignReportsTool, + mailchimp_get_campaign_report: mailchimpGetCampaignReportTool, + mailchimp_get_segments: mailchimpGetSegmentsTool, + mailchimp_get_segment: mailchimpGetSegmentTool, + mailchimp_create_segment: mailchimpCreateSegmentTool, + mailchimp_update_segment: mailchimpUpdateSegmentTool, + mailchimp_delete_segment: mailchimpDeleteSegmentTool, + mailchimp_get_segment_members: mailchimpGetSegmentMembersTool, + mailchimp_add_segment_member: mailchimpAddSegmentMemberTool, + mailchimp_remove_segment_member: mailchimpRemoveSegmentMemberTool, + mailchimp_get_member_tags: mailchimpGetMemberTagsTool, + mailchimp_add_member_tags: mailchimpAddMemberTagsTool, + mailchimp_remove_member_tags: mailchimpRemoveMemberTagsTool, + mailchimp_get_merge_fields: mailchimpGetMergeFieldsTool, + mailchimp_get_merge_field: mailchimpGetMergeFieldTool, + mailchimp_create_merge_field: mailchimpCreateMergeFieldTool, + mailchimp_update_merge_field: mailchimpUpdateMergeFieldTool, + mailchimp_delete_merge_field: mailchimpDeleteMergeFieldTool, + mailchimp_get_interest_categories: mailchimpGetInterestCategoriesTool, + mailchimp_get_interest_category: mailchimpGetInterestCategoryTool, + mailchimp_create_interest_category: mailchimpCreateInterestCategoryTool, + mailchimp_update_interest_category: mailchimpUpdateInterestCategoryTool, + mailchimp_delete_interest_category: mailchimpDeleteInterestCategoryTool, + mailchimp_get_interests: mailchimpGetInterestsTool, + mailchimp_get_interest: mailchimpGetInterestTool, + mailchimp_create_interest: mailchimpCreateInterestTool, + mailchimp_update_interest: mailchimpUpdateInterestTool, + mailchimp_delete_interest: mailchimpDeleteInterestTool, + mailchimp_get_landing_pages: mailchimpGetLandingPagesTool, + mailchimp_get_landing_page: mailchimpGetLandingPageTool, + mailchimp_create_landing_page: mailchimpCreateLandingPageTool, + mailchimp_update_landing_page: mailchimpUpdateLandingPageTool, + mailchimp_delete_landing_page: mailchimpDeleteLandingPageTool, + mailchimp_publish_landing_page: mailchimpPublishLandingPageTool, + mailchimp_unpublish_landing_page: mailchimpUnpublishLandingPageTool, + mailchimp_get_batch_operations: mailchimpGetBatchOperationsTool, + mailchimp_get_batch_operation: mailchimpGetBatchOperationTool, + mailchimp_create_batch_operation: mailchimpCreateBatchOperationTool, + mailchimp_delete_batch_operation: mailchimpDeleteBatchOperationTool, + zendesk_get_tickets: zendeskGetTicketsTool, + zendesk_get_ticket: zendeskGetTicketTool, + zendesk_create_ticket: zendeskCreateTicketTool, + zendesk_create_tickets_bulk: zendeskCreateTicketsBulkTool, + zendesk_update_ticket: zendeskUpdateTicketTool, + zendesk_update_tickets_bulk: zendeskUpdateTicketsBulkTool, + zendesk_delete_ticket: zendeskDeleteTicketTool, + zendesk_merge_tickets: zendeskMergeTicketsTool, + zendesk_get_users: zendeskGetUsersTool, + zendesk_get_user: zendeskGetUserTool, + zendesk_search_users: zendeskSearchUsersTool, + zendesk_create_user: zendeskCreateUserTool, + zendesk_create_users_bulk: zendeskCreateUsersBulkTool, + zendesk_update_user: zendeskUpdateUserTool, + zendesk_update_users_bulk: 
zendeskUpdateUsersBulkTool, + zendesk_delete_user: zendeskDeleteUserTool, + zendesk_get_current_user: zendeskGetCurrentUserTool, + zendesk_get_organizations: zendeskGetOrganizationsTool, + zendesk_get_organization: zendeskGetOrganizationTool, + zendesk_autocomplete_organizations: zendeskAutocompleteOrganizationsTool, + zendesk_create_organization: zendeskCreateOrganizationTool, + zendesk_create_organizations_bulk: zendeskCreateOrganizationsBulkTool, + zendesk_update_organization: zendeskUpdateOrganizationTool, + zendesk_delete_organization: zendeskDeleteOrganizationTool, + zendesk_search: zendeskSearchTool, + zendesk_search_count: zendeskSearchCountTool, + intercom_create_contact: intercomCreateContactTool, + intercom_create_contact_v2: intercomCreateContactV2Tool, + intercom_get_contact: intercomGetContactTool, + intercom_get_contact_v2: intercomGetContactV2Tool, + intercom_update_contact: intercomUpdateContactTool, + intercom_update_contact_v2: intercomUpdateContactV2Tool, + intercom_list_contacts: intercomListContactsTool, + intercom_list_contacts_v2: intercomListContactsV2Tool, + intercom_search_contacts: intercomSearchContactsTool, + intercom_search_contacts_v2: intercomSearchContactsV2Tool, + intercom_delete_contact: intercomDeleteContactTool, + intercom_delete_contact_v2: intercomDeleteContactV2Tool, + intercom_create_company: intercomCreateCompanyTool, + intercom_create_company_v2: intercomCreateCompanyV2Tool, + intercom_get_company: intercomGetCompanyTool, + intercom_get_company_v2: intercomGetCompanyV2Tool, + intercom_list_companies: intercomListCompaniesTool, + intercom_list_companies_v2: intercomListCompaniesV2Tool, + intercom_get_conversation: intercomGetConversationTool, + intercom_get_conversation_v2: intercomGetConversationV2Tool, + intercom_list_conversations: intercomListConversationsTool, + intercom_list_conversations_v2: intercomListConversationsV2Tool, + intercom_reply_conversation: intercomReplyConversationTool, + intercom_reply_conversation_v2: intercomReplyConversationV2Tool, + intercom_search_conversations: intercomSearchConversationsTool, + intercom_search_conversations_v2: intercomSearchConversationsV2Tool, + intercom_create_ticket: intercomCreateTicketTool, + intercom_create_ticket_v2: intercomCreateTicketV2Tool, + intercom_get_ticket: intercomGetTicketTool, + intercom_get_ticket_v2: intercomGetTicketV2Tool, + intercom_update_ticket_v2: intercomUpdateTicketV2Tool, + intercom_create_message: intercomCreateMessageTool, + intercom_create_message_v2: intercomCreateMessageV2Tool, + intercom_list_admins_v2: intercomListAdminsV2Tool, + intercom_close_conversation_v2: intercomCloseConversationV2Tool, + intercom_open_conversation_v2: intercomOpenConversationV2Tool, + intercom_snooze_conversation_v2: intercomSnoozeConversationV2Tool, + intercom_assign_conversation_v2: intercomAssignConversationV2Tool, + intercom_list_tags_v2: intercomListTagsV2Tool, + intercom_create_tag_v2: intercomCreateTagV2Tool, + intercom_tag_contact_v2: intercomTagContactV2Tool, + intercom_untag_contact_v2: intercomUntagContactV2Tool, + intercom_tag_conversation_v2: intercomTagConversationV2Tool, + intercom_create_note_v2: intercomCreateNoteV2Tool, + intercom_create_event_v2: intercomCreateEventV2Tool, + intercom_attach_contact_to_company_v2: intercomAttachContactToCompanyV2Tool, + intercom_detach_contact_from_company_v2: intercomDetachContactFromCompanyV2Tool, + sentry_issues_list: listIssuesTool, + sentry_issues_get: getIssueTool, + sentry_issues_update: updateIssueTool, + sentry_projects_list: 
listProjectsTool, + sentry_projects_get: getProjectTool, + sentry_projects_create: createProjectTool, + sentry_projects_update: updateProjectTool, + sentry_events_list: listEventsTool, + sentry_events_get: getEventTool, + sentry_releases_list: listReleasesTool, + sentry_releases_create: createReleaseTool, + sentry_releases_deploy: createDeployTool, + zoom_create_meeting: zoomCreateMeetingTool, + zoom_list_meetings: zoomListMeetingsTool, + zoom_get_meeting: zoomGetMeetingTool, + zoom_update_meeting: zoomUpdateMeetingTool, + zoom_delete_meeting: zoomDeleteMeetingTool, + zoom_get_meeting_invitation: zoomGetMeetingInvitationTool, + zoom_list_recordings: zoomListRecordingsTool, + zoom_get_meeting_recordings: zoomGetMeetingRecordingsTool, + zoom_delete_recording: zoomDeleteRecordingTool, + zoom_list_past_participants: zoomListPastParticipantsTool, + // Spotify + spotify_search: spotifySearchTool, + spotify_get_track: spotifyGetTrackTool, + spotify_get_tracks: spotifyGetTracksTool, + spotify_get_album: spotifyGetAlbumTool, + spotify_get_albums: spotifyGetAlbumsTool, + spotify_get_album_tracks: spotifyGetAlbumTracksTool, + spotify_get_saved_albums: spotifyGetSavedAlbumsTool, + spotify_save_albums: spotifySaveAlbumsTool, + spotify_remove_saved_albums: spotifyRemoveSavedAlbumsTool, + spotify_check_saved_albums: spotifyCheckSavedAlbumsTool, + spotify_get_artist: spotifyGetArtistTool, + spotify_get_artists: spotifyGetArtistsTool, + spotify_get_artist_albums: spotifyGetArtistAlbumsTool, + spotify_get_artist_top_tracks: spotifyGetArtistTopTracksTool, + spotify_follow_artists: spotifyFollowArtistsTool, + spotify_unfollow_artists: spotifyUnfollowArtistsTool, + spotify_get_followed_artists: spotifyGetFollowedArtistsTool, + spotify_check_following: spotifyCheckFollowingTool, + spotify_get_show: spotifyGetShowTool, + spotify_get_shows: spotifyGetShowsTool, + spotify_get_show_episodes: spotifyGetShowEpisodesTool, + spotify_get_saved_shows: spotifyGetSavedShowsTool, + spotify_save_shows: spotifySaveShowsTool, + spotify_remove_saved_shows: spotifyRemoveSavedShowsTool, + spotify_check_saved_shows: spotifyCheckSavedShowsTool, + spotify_get_episode: spotifyGetEpisodeTool, + spotify_get_episodes: spotifyGetEpisodesTool, + spotify_get_saved_episodes: spotifyGetSavedEpisodesTool, + spotify_save_episodes: spotifySaveEpisodesTool, + spotify_remove_saved_episodes: spotifyRemoveSavedEpisodesTool, + spotify_check_saved_episodes: spotifyCheckSavedEpisodesTool, + spotify_get_audiobook: spotifyGetAudiobookTool, + spotify_get_audiobooks: spotifyGetAudiobooksTool, + spotify_get_audiobook_chapters: spotifyGetAudiobookChaptersTool, + spotify_get_saved_audiobooks: spotifyGetSavedAudiobooksTool, + spotify_save_audiobooks: spotifySaveAudiobooksTool, + spotify_remove_saved_audiobooks: spotifyRemoveSavedAudiobooksTool, + spotify_check_saved_audiobooks: spotifyCheckSavedAudiobooksTool, + spotify_get_playlist: spotifyGetPlaylistTool, + spotify_get_playlist_tracks: spotifyGetPlaylistTracksTool, + spotify_get_playlist_cover: spotifyGetPlaylistCoverTool, + spotify_get_user_playlists: spotifyGetUserPlaylistsTool, + spotify_create_playlist: spotifyCreatePlaylistTool, + spotify_update_playlist: spotifyUpdatePlaylistTool, + spotify_add_playlist_cover: spotifyAddPlaylistCoverTool, + spotify_add_tracks_to_playlist: spotifyAddTracksToPlaylistTool, + spotify_remove_tracks_from_playlist: spotifyRemoveTracksFromPlaylistTool, + spotify_reorder_playlist_items: spotifyReorderPlaylistItemsTool, + spotify_replace_playlist_items: 
spotifyReplacePlaylistItemsTool, + spotify_follow_playlist: spotifyFollowPlaylistTool, + spotify_unfollow_playlist: spotifyUnfollowPlaylistTool, + spotify_check_playlist_followers: spotifyCheckPlaylistFollowersTool, + spotify_get_current_user: spotifyGetCurrentUserTool, + spotify_get_user_profile: spotifyGetUserProfileTool, + spotify_get_top_tracks: spotifyGetTopTracksTool, + spotify_get_top_artists: spotifyGetTopArtistsTool, + spotify_get_saved_tracks: spotifyGetSavedTracksTool, + spotify_save_tracks: spotifySaveTracksTool, + spotify_remove_saved_tracks: spotifyRemoveSavedTracksTool, + spotify_check_saved_tracks: spotifyCheckSavedTracksTool, + spotify_get_recently_played: spotifyGetRecentlyPlayedTool, + spotify_get_new_releases: spotifyGetNewReleasesTool, + spotify_get_categories: spotifyGetCategoriesTool, + spotify_get_markets: spotifyGetMarketsTool, + spotify_get_playback_state: spotifyGetPlaybackStateTool, + spotify_get_currently_playing: spotifyGetCurrentlyPlayingTool, + spotify_get_devices: spotifyGetDevicesTool, + spotify_get_queue: spotifyGetQueueTool, + spotify_play: spotifyPlayTool, + spotify_pause: spotifyPauseTool, + spotify_skip_next: spotifySkipNextTool, + spotify_skip_previous: spotifySkipPreviousTool, + spotify_seek: spotifySeekTool, + spotify_add_to_queue: spotifyAddToQueueTool, + spotify_set_volume: spotifySetVolumeTool, + spotify_set_repeat: spotifySetRepeatTool, + spotify_set_shuffle: spotifySetShuffleTool, + spotify_transfer_playback: spotifyTransferPlaybackTool, +} diff --git a/.tmp_old_types.ts b/.tmp_old_types.ts new file mode 100644 index 0000000000..49b76bd624 --- /dev/null +++ b/.tmp_old_types.ts @@ -0,0 +1,261 @@ +import type { CopilotMode, CopilotModelId } from '@/lib/copilot/models' + +export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models' + +import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool' +import type { WorkflowState } from '@/stores/workflows/workflow/types' + +export type ToolState = ClientToolCallState + +/** + * Subagent content block for nested thinking/reasoning inside a tool call + */ +export interface SubAgentContentBlock { + type: 'subagent_text' | 'subagent_tool_call' + content?: string + toolCall?: CopilotToolCall + timestamp: number +} + +export interface CopilotToolCall { + id: string + name: string + state: ClientToolCallState + params?: Record + display?: ClientToolDisplay + /** Content streamed from a subagent (e.g., debug agent) */ + subAgentContent?: string + /** Tool calls made by the subagent */ + subAgentToolCalls?: CopilotToolCall[] + /** Structured content blocks for subagent (thinking + tool calls in order) */ + subAgentBlocks?: SubAgentContentBlock[] + /** Whether subagent is currently streaming */ + subAgentStreaming?: boolean +} + +export interface MessageFileAttachment { + id: string + key: string + filename: string + media_type: string + size: number +} + +export interface CopilotMessage { + id: string + role: 'user' | 'assistant' | 'system' + content: string + timestamp: string + citations?: { id: number; title: string; url: string; similarity?: number }[] + toolCalls?: CopilotToolCall[] + contentBlocks?: Array< + | { type: 'text'; content: string; timestamp: number } + | { + type: 'thinking' + content: string + timestamp: number + duration?: number + startTime?: number + } + | { type: 'tool_call'; toolCall: CopilotToolCall; timestamp: number } + | { type: 'contexts'; contexts: ChatContext[]; timestamp: number } + > + fileAttachments?: MessageFileAttachment[] + 
contexts?: ChatContext[] + errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required' +} + +/** + * A message queued for sending while another message is in progress. + * Like Cursor's queued message feature. + */ +export interface QueuedMessage { + id: string + content: string + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + queuedAt: number + /** Original messageId to use when processing (for edit/resend flows) */ + originalMessageId?: string +} + +// Contexts attached to a user message +export type ChatContext = + | { kind: 'past_chat'; chatId: string; label: string } + | { kind: 'workflow'; workflowId: string; label: string } + | { kind: 'current_workflow'; workflowId: string; label: string } + | { kind: 'blocks'; blockIds: string[]; label: string } + | { kind: 'logs'; executionId?: string; label: string } + | { kind: 'workflow_block'; workflowId: string; blockId: string; label: string } + | { kind: 'knowledge'; knowledgeId?: string; label: string } + | { kind: 'templates'; templateId?: string; label: string } + | { kind: 'docs'; label: string } + | { kind: 'slash_command'; command: string; label: string } + +import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api' + +export type CopilotChat = ApiCopilotChat + +export interface CopilotState { + mode: CopilotMode + selectedModel: CopilotModelId + agentPrefetch: boolean + enabledModels: string[] | null // Null means not loaded yet, array of model IDs when loaded + isCollapsed: boolean + + currentChat: CopilotChat | null + chats: CopilotChat[] + messages: CopilotMessage[] + workflowId: string | null + + checkpoints: any[] + messageCheckpoints: Record + messageSnapshots: Record + + isLoading: boolean + isLoadingChats: boolean + isLoadingCheckpoints: boolean + isSendingMessage: boolean + isSaving: boolean + isRevertingCheckpoint: boolean + isAborting: boolean + /** Skip adding Continue option on abort for queued send-now */ + suppressAbortContinueOption?: boolean + + error: string | null + saveError: string | null + checkpointError: string | null + + abortController: AbortController | null + + chatsLastLoadedAt: Date | null + chatsLoadedForWorkflow: string | null + + revertState: { messageId: string; messageContent: string } | null + inputValue: string + + planTodos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }> + showPlanTodos: boolean + + // Streaming plan content from design_workflow tool (for plan mode section) + streamingPlanContent: string + + // Map of toolCallId -> CopilotToolCall for quick access during streaming + toolCallsById: Record + + // Transient flag to prevent auto-selecting a chat during new-chat UX + suppressAutoSelect?: boolean + + // Explicitly track the current user message id for this in-flight query (for stats/diff correlation) + currentUserMessageId?: string | null + + // Per-message metadata captured at send-time for reliable stats + + // Auto-allowed integration tools (tools that can run without confirmation) + autoAllowedTools: string[] + + // Message queue for messages sent while another is in progress + messageQueue: QueuedMessage[] + + // Credential IDs to mask in UI (for sensitive data protection) + sensitiveCredentialIds: Set +} + +export interface CopilotActions { + setMode: (mode: CopilotMode) => void + setSelectedModel: (model: CopilotStore['selectedModel']) => Promise + setAgentPrefetch: (prefetch: boolean) => void + setEnabledModels: (models: string[] | null) => void + + setWorkflowId: 
(workflowId: string | null) => Promise + validateCurrentChat: () => boolean + loadChats: (forceRefresh?: boolean) => Promise + areChatsFresh: (workflowId: string) => boolean + selectChat: (chat: CopilotChat) => Promise + createNewChat: () => Promise + deleteChat: (chatId: string) => Promise + + sendMessage: ( + message: string, + options?: { + stream?: boolean + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + messageId?: string + queueIfBusy?: boolean + } + ) => Promise + abortMessage: (options?: { suppressContinueOption?: boolean }) => void + sendImplicitFeedback: ( + implicitFeedback: string, + toolCallState?: 'accepted' | 'rejected' | 'error' + ) => Promise + updatePreviewToolCallState: ( + toolCallState: 'accepted' | 'rejected' | 'error', + toolCallId?: string + ) => void + setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void + updateToolCallParams: (toolCallId: string, params: Record) => void + sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise + saveChatMessages: (chatId: string) => Promise + + loadCheckpoints: (chatId: string) => Promise + loadMessageCheckpoints: (chatId: string) => Promise + revertToCheckpoint: (checkpointId: string) => Promise + getCheckpointsForMessage: (messageId: string) => any[] + saveMessageCheckpoint: (messageId: string) => Promise + + clearMessages: () => void + clearError: () => void + clearSaveError: () => void + clearCheckpointError: () => void + retrySave: (chatId: string) => Promise + cleanup: () => void + reset: () => void + + setInputValue: (value: string) => void + clearRevertState: () => void + + setPlanTodos: ( + todos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }> + ) => void + updatePlanTodoStatus: (id: string, status: 'executing' | 'completed') => void + closePlanTodos: () => void + clearPlanArtifact: () => Promise + savePlanArtifact: (content: string) => Promise + + handleStreamingResponse: ( + stream: ReadableStream, + messageId: string, + isContinuation?: boolean, + triggerUserMessageId?: string + ) => Promise + handleNewChatCreation: (newChatId: string) => Promise + executeIntegrationTool: (toolCallId: string) => Promise + skipIntegrationTool: (toolCallId: string) => void + loadAutoAllowedTools: () => Promise + addAutoAllowedTool: (toolId: string) => Promise + removeAutoAllowedTool: (toolId: string) => Promise + isToolAutoAllowed: (toolId: string) => boolean + + // Credential masking + loadSensitiveCredentialIds: () => Promise + maskCredentialValue: (value: string) => string + + // Message queue actions + addToQueue: ( + message: string, + options?: { + fileAttachments?: MessageFileAttachment[] + contexts?: ChatContext[] + /** Original messageId to preserve (for edit/resend flows) */ + messageId?: string + } + ) => void + removeFromQueue: (id: string) => void + moveUpInQueue: (id: string) => void + sendNow: (id: string) => Promise + clearQueue: () => void +} + +export type CopilotStore = CopilotState & CopilotActions diff --git a/apps/sim/app/api/copilot/chat/route.ts b/apps/sim/app/api/copilot/chat/route.ts index 513c0798d8..a77506a720 100644 --- a/apps/sim/app/api/copilot/chat/route.ts +++ b/apps/sim/app/api/copilot/chat/route.ts @@ -113,6 +113,7 @@ const ChatMessageSchema = z.object({ workflowId: z.string().optional(), knowledgeId: z.string().optional(), blockId: z.string().optional(), + blockIds: z.array(z.string()).optional(), templateId: z.string().optional(), executionId: z.string().optional(), // For 
workflow_block, provide both workflowId and blockId @@ -159,6 +160,20 @@ export async function POST(req: NextRequest) { commands, } = ChatMessageSchema.parse(body) + const normalizedContexts = Array.isArray(contexts) + ? contexts.map((ctx) => { + if (ctx.kind !== 'blocks') return ctx + if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx + if (ctx.blockId) { + return { + ...ctx, + blockIds: [ctx.blockId], + } + } + return ctx + }) + : contexts + // Resolve workflowId - if not provided, use first workflow or find by name const resolved = await resolveWorkflowIdForUser( authenticatedUserId, @@ -176,10 +191,10 @@ export async function POST(req: NextRequest) { const userMessageIdToUse = userMessageId || crypto.randomUUID() try { logger.info(`[${tracker.requestId}] Received chat POST`, { - hasContexts: Array.isArray(contexts), - contextsCount: Array.isArray(contexts) ? contexts.length : 0, - contextsPreview: Array.isArray(contexts) - ? contexts.map((c: any) => ({ + hasContexts: Array.isArray(normalizedContexts), + contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0, + contextsPreview: Array.isArray(normalizedContexts) + ? normalizedContexts.map((c: any) => ({ kind: c?.kind, chatId: c?.chatId, workflowId: c?.workflowId, @@ -191,17 +206,25 @@ export async function POST(req: NextRequest) { } catch {} // Preprocess contexts server-side let agentContexts: Array<{ type: string; content: string }> = [] - if (Array.isArray(contexts) && contexts.length > 0) { + if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) { try { const { processContextsServer } = await import('@/lib/copilot/process-contents') - const processed = await processContextsServer(contexts as any, authenticatedUserId, message) + const processed = await processContextsServer( + normalizedContexts as any, + authenticatedUserId, + message + ) agentContexts = processed logger.info(`[${tracker.requestId}] Contexts processed for request`, { processedCount: agentContexts.length, kinds: agentContexts.map((c) => c.type), lengthPreview: agentContexts.map((c) => c.content?.length ?? 0), }) - if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) { + if ( + Array.isArray(normalizedContexts) && + normalizedContexts.length > 0 && + agentContexts.length === 0 + ) { logger.warn( `[${tracker.requestId}] Contexts provided but none processed. 
Check executionId for logs contexts.` ) @@ -246,11 +269,13 @@ export async function POST(req: NextRequest) { mode, model: selectedModel, provider, + conversationId: effectiveConversationId, conversationHistory, contexts: agentContexts, fileAttachments, commands, chatId: actualChatId, + prefetch, implicitFeedback, }, { @@ -432,10 +457,14 @@ export async function POST(req: NextRequest) { content: message, timestamp: new Date().toISOString(), ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }), - ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }), - ...(Array.isArray(contexts) && - contexts.length > 0 && { - contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }], + ...(Array.isArray(normalizedContexts) && normalizedContexts.length > 0 && { + contexts: normalizedContexts, + }), + ...(Array.isArray(normalizedContexts) && + normalizedContexts.length > 0 && { + contentBlocks: [ + { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() }, + ], }), } diff --git a/apps/sim/lib/copilot/chat-payload.ts b/apps/sim/lib/copilot/chat-payload.ts index 110dbfbc79..06cf2b11f3 100644 --- a/apps/sim/lib/copilot/chat-payload.ts +++ b/apps/sim/lib/copilot/chat-payload.ts @@ -15,11 +15,13 @@ export interface BuildPayloadParams { mode: string model: string provider?: string + conversationId?: string conversationHistory?: unknown[] contexts?: Array<{ type: string; content: string }> fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }> commands?: string[] chatId?: string + prefetch?: boolean implicitFeedback?: string } @@ -60,10 +62,12 @@ export async function buildCopilotRequestPayload( userMessageId, mode, provider, + conversationId, contexts, fileAttachments, commands, chatId, + prefetch, } = params const selectedModel = options.selectedModel @@ -149,9 +153,11 @@ export async function buildCopilotRequestPayload( userId, model: selectedModel, ...(provider ? { provider } : {}), + ...(conversationId ? { conversationId } : {}), mode: transportMode, messageId: userMessageId, version: SIM_AGENT_VERSION, + ...(typeof prefetch === 'boolean' ? { prefetch } : {}), ...(contexts && contexts.length > 0 ? { context: contexts } : {}), ...(chatId ? { chatId } : {}), ...(processedFileContents.length > 0 ? 
{ fileAttachments: processedFileContents } : {}), diff --git a/apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts b/apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts index fcc5abf433..d18e0f0c1f 100644 --- a/apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts +++ b/apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts @@ -99,4 +99,49 @@ describe('sse-handlers tool lifecycle', () => { expect(executeToolServerSide).toHaveBeenCalledTimes(1) expect(markToolComplete).toHaveBeenCalledTimes(1) }) + + it('does not complete stream on done when tool calls are still pending', () => { + context.toolCalls.set('tool-pending', { + id: 'tool-pending', + name: 'get_user_workflow', + status: 'pending', + startTime: Date.now(), + }) + + sseHandlers.done( + { + type: 'done', + data: { responseId: 'resp-1' }, + } as any, + context, + execContext, + {} + ) + + expect(context.conversationId).toBe('resp-1') + expect(context.streamComplete).toBe(false) + }) + + it('completes stream on done when no tool calls are pending', () => { + context.toolCalls.set('tool-done', { + id: 'tool-done', + name: 'get_user_workflow', + status: 'success', + startTime: Date.now() - 10, + endTime: Date.now(), + }) + + sseHandlers.done( + { + type: 'done', + data: { responseId: 'resp-2' }, + } as any, + context, + execContext, + {} + ) + + expect(context.conversationId).toBe('resp-2') + expect(context.streamComplete).toBe(true) + }) }) diff --git a/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts b/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts index 0f29ef3b35..ba74e986c3 100644 --- a/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts +++ b/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts @@ -57,6 +57,15 @@ function inferToolSuccess(data: Record | undefined): { return { success, hasResultData, hasError } } +function hasPendingToolCalls(context: StreamingContext): boolean { + for (const toolCall of context.toolCalls.values()) { + if (toolCall.status === 'pending' || toolCall.status === 'executing') { + return true + } + } + return false +} + export type SSEHandler = ( event: SSEEvent, context: StreamingContext, @@ -354,6 +363,12 @@ export const sseHandlers: Record = { if (d.responseId) { context.conversationId = d.responseId as string } + // Preserve parity with the legacy stream-forwarding behavior: + // don't finalize the stream on a `done` event while tools are still active. + // Some event sequences may include an early `done` before tail tool events. 
+ if (hasPendingToolCalls(context)) { + return + } context.streamComplete = true }, start: (event, context) => { diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts index 2bd0e6611f..8e6dab8f81 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' -import { workflow } from '@sim/db/schema' +import { customTools, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, desc, eq, isNull, or } from 'drizzle-orm' import { SIM_AGENT_API_URL } from '@/lib/copilot/constants' import type { ExecutionContext, @@ -12,6 +12,7 @@ import { routeExecution } from '@/lib/copilot/tools/server/router' import { env } from '@/lib/core/config/env' import { getBaseUrl } from '@/lib/core/utils/urls' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' +import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations' import { getTool, resolveToolId } from '@/tools/utils' import { executeCheckDeploymentStatus, @@ -76,6 +77,245 @@ import { const logger = createLogger('CopilotToolExecutor') +type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list' + +interface ManageCustomToolSchema { + type: 'function' + function: { + name: string + description?: string + parameters: Record + } +} + +interface ManageCustomToolParams { + operation?: string + toolId?: string + schema?: ManageCustomToolSchema + code?: string + title?: string + workspaceId?: string +} + +async function executeManageCustomTool( + rawParams: Record, + context: ExecutionContext +): Promise { + const params = rawParams as ManageCustomToolParams + const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation + const workspaceId = params.workspaceId || context.workspaceId + + if (!operation) { + return { success: false, error: "Missing required 'operation' argument" } + } + + try { + if (operation === 'list') { + const toolsForUser = workspaceId + ? 
await db + .select() + .from(customTools) + .where( + or( + eq(customTools.workspaceId, workspaceId), + and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)) + ) + ) + .orderBy(desc(customTools.createdAt)) + : await db + .select() + .from(customTools) + .where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))) + .orderBy(desc(customTools.createdAt)) + + return { + success: true, + output: { + success: true, + operation, + tools: toolsForUser, + count: toolsForUser.length, + }, + } + } + + if (operation === 'add') { + if (!workspaceId) { + return { + success: false, + error: "workspaceId is required for operation 'add'", + } + } + if (!params.schema || !params.code) { + return { + success: false, + error: "Both 'schema' and 'code' are required for operation 'add'", + } + } + + const title = params.title || params.schema.function?.name + if (!title) { + return { success: false, error: "Missing tool title or schema.function.name for 'add'" } + } + + const resultTools = await upsertCustomTools({ + tools: [ + { + title, + schema: params.schema, + code: params.code, + }, + ], + workspaceId, + userId: context.userId, + }) + const created = resultTools.find((tool) => tool.title === title) + + return { + success: true, + output: { + success: true, + operation, + toolId: created?.id, + title, + message: `Created custom tool "${title}"`, + }, + } + } + + if (operation === 'edit') { + if (!workspaceId) { + return { + success: false, + error: "workspaceId is required for operation 'edit'", + } + } + if (!params.toolId) { + return { success: false, error: "'toolId' is required for operation 'edit'" } + } + if (!params.schema && !params.code) { + return { + success: false, + error: "At least one of 'schema' or 'code' is required for operation 'edit'", + } + } + + const workspaceTool = await db + .select() + .from(customTools) + .where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))) + .limit(1) + + const legacyTool = + workspaceTool.length === 0 + ? await db + .select() + .from(customTools) + .where( + and( + eq(customTools.id, params.toolId), + isNull(customTools.workspaceId), + eq(customTools.userId, context.userId) + ) + ) + .limit(1) + : [] + + const existing = workspaceTool[0] || legacyTool[0] + if (!existing) { + return { success: false, error: `Custom tool not found: ${params.toolId}` } + } + + const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema) + const mergedCode = params.code || existing.code + const title = params.title || mergedSchema.function?.name || existing.title + + await upsertCustomTools({ + tools: [ + { + id: params.toolId, + title, + schema: mergedSchema, + code: mergedCode, + }, + ], + workspaceId, + userId: context.userId, + }) + + return { + success: true, + output: { + success: true, + operation, + toolId: params.toolId, + title, + message: `Updated custom tool "${title}"`, + }, + } + } + + if (operation === 'delete') { + if (!params.toolId) { + return { success: false, error: "'toolId' is required for operation 'delete'" } + } + + const workspaceDelete = + workspaceId != null + ? await db + .delete(customTools) + .where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))) + .returning({ id: customTools.id }) + : [] + + const legacyDelete = + workspaceDelete.length === 0 + ? 
await db + .delete(customTools) + .where( + and( + eq(customTools.id, params.toolId), + isNull(customTools.workspaceId), + eq(customTools.userId, context.userId) + ) + ) + .returning({ id: customTools.id }) + : [] + + const deleted = workspaceDelete[0] || legacyDelete[0] + if (!deleted) { + return { success: false, error: `Custom tool not found: ${params.toolId}` } + } + + return { + success: true, + output: { + success: true, + operation, + toolId: params.toolId, + message: 'Deleted custom tool', + }, + } + } + + return { + success: false, + error: `Unsupported operation for manage_custom_tool: ${operation}`, + } + } catch (error) { + logger.error('manage_custom_tool execution failed', { + operation, + workspaceId, + userId: context.userId, + error: error instanceof Error ? error.message : String(error), + }) + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to manage custom tool', + } + } +} + const SERVER_TOOLS = new Set([ 'get_blocks_and_tools', 'get_blocks_metadata', @@ -161,6 +401,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record< } } }, + oauth_request_access: async (p, _c) => { + const providerName = (p.providerName || p.provider_name || 'the provider') as string + return { + success: true, + output: { + success: true, + status: 'requested', + providerName, + message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`, + }, + } + }, + manage_custom_tool: (p, c) => executeManageCustomTool(p, c), } /** diff --git a/apps/sim/stores/panel/copilot/store.ts b/apps/sim/stores/panel/copilot/store.ts index e7261a229d..f767e1a5ba 100644 --- a/apps/sim/stores/panel/copilot/store.ts +++ b/apps/sim/stores/panel/copilot/store.ts @@ -310,6 +310,50 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) } } +/** + * Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog. 
+ * + * Examples: + * - claude-4.5-opus -> claude-opus-4-5 + * - claude-opus-4.6 -> claude-opus-4-6 + * - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only) + */ +function canonicalizeModelMatchKey(modelId: string): string { + if (!modelId) return modelId + const normalized = modelId.trim().toLowerCase() + + const toCanonicalClaude = (tier: string, version: string): string => { + const normalizedVersion = version.replace(/\./g, '-') + return `claude-${tier}-${normalizedVersion}` + } + + const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/) + if (tierFirstExact) { + const [, tier, version] = tierFirstExact + return toCanonicalClaude(tier, version) + } + + const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/) + if (versionFirstExact) { + const [, version, tier] = versionFirstExact + return toCanonicalClaude(tier, version) + } + + const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/) + if (tierFirstEmbedded) { + const [, tier, version] = tierFirstEmbedded + return toCanonicalClaude(tier, version) + } + + const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/) + if (versionFirstEmbedded) { + const [, version, tier] = versionFirstEmbedded + return toCanonicalClaude(tier, version) + } + + return normalized +} + const MODEL_PROVIDER_PRIORITY = [ 'anthropic', 'bedrock', @@ -350,12 +394,21 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel const { provider, modelId } = parseModelKey(selectedModel) const targetModelId = modelId || selectedModel - - const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`)) + const targetMatchKey = canonicalizeModelMatchKey(targetModelId) + + const matches = models.filter((m) => { + const candidateModelId = parseModelKey(m.id).modelId || m.id + const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId) + return ( + candidateModelId === targetModelId || + m.id.endsWith(`/${targetModelId}`) || + candidateMatchKey === targetMatchKey + ) + }) if (matches.length === 0) return selectedModel if (provider) { - const sameProvider = matches.find((m) => m.provider === provider) + const sameProvider = matches.find((m) => m.provider === provider || m.id.startsWith(`${provider}/`)) if (sameProvider) return sameProvider.id } @@ -1093,11 +1146,12 @@ export const useCopilotStore = create()( const chatConfig = chat.config ?? {} const chatMode = chatConfig.mode || get().mode const chatModel = chatConfig.model || get().selectedModel + const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels) logger.debug('[Chat] Restoring chat config', { chatId: chat.id, mode: chatMode, - model: chatModel, + model: normalizedChatModel, hasPlanArtifact: !!planArtifact, }) @@ -1119,7 +1173,7 @@ export const useCopilotStore = create()( showPlanTodos: false, streamingPlanContent: planArtifact, mode: chatMode, - selectedModel: chatModel as CopilotStore['selectedModel'], + selectedModel: normalizedChatModel as CopilotStore['selectedModel'], suppressAutoSelect: false, }) @@ -1292,6 +1346,10 @@ export const useCopilotStore = create()( const refreshedConfig = updatedCurrentChat.config ?? 
{} const refreshedMode = refreshedConfig.mode || get().mode const refreshedModel = refreshedConfig.model || get().selectedModel + const normalizedRefreshedModel = normalizeSelectedModelKey( + refreshedModel, + get().availableModels + ) const toolCallsById = buildToolCallsById(normalizedMessages) set({ @@ -1300,7 +1358,7 @@ export const useCopilotStore = create()( toolCallsById, streamingPlanContent: refreshedPlanArtifact, mode: refreshedMode, - selectedModel: refreshedModel as CopilotStore['selectedModel'], + selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'], }) } try { @@ -1320,11 +1378,15 @@ export const useCopilotStore = create()( const chatConfig = mostRecentChat.config ?? {} const chatMode = chatConfig.mode || get().mode const chatModel = chatConfig.model || get().selectedModel + const normalizedChatModel = normalizeSelectedModelKey( + chatModel, + get().availableModels + ) logger.info('[Chat] Auto-selecting most recent chat with config', { chatId: mostRecentChat.id, mode: chatMode, - model: chatModel, + model: normalizedChatModel, hasPlanArtifact: !!planArtifact, }) @@ -1336,7 +1398,7 @@ export const useCopilotStore = create()( toolCallsById, streamingPlanContent: planArtifact, mode: chatMode, - selectedModel: chatModel as CopilotStore['selectedModel'], + selectedModel: normalizedChatModel as CopilotStore['selectedModel'], }) try { await get().loadMessageCheckpoints(mostRecentChat.id) @@ -2268,7 +2330,8 @@ export const useCopilotStore = create()( }, setSelectedModel: async (model) => { - set({ selectedModel: model }) + const normalizedModel = normalizeSelectedModelKey(model, get().availableModels) + set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] }) }, setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }), loadAvailableModels: async () => { From 5f3e76d459698452229f66b336b8ea0885391be7 Mon Sep 17 00:00:00 2001 From: Waleed Date: Wed, 11 Feb 2026 14:33:03 -0800 Subject: [PATCH 06/14] fix(variables): fix tag dropdown and cursor alignment in variables block (#3199) --- apps/sim/app/api/copilot/chat/route.ts | 7 ++++--- apps/sim/lib/copilot/orchestrator/tool-executor/index.ts | 4 +++- apps/sim/stores/panel/copilot/store.ts | 4 +++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/apps/sim/app/api/copilot/chat/route.ts b/apps/sim/app/api/copilot/chat/route.ts index a77506a720..25349e9145 100644 --- a/apps/sim/app/api/copilot/chat/route.ts +++ b/apps/sim/app/api/copilot/chat/route.ts @@ -457,9 +457,10 @@ export async function POST(req: NextRequest) { content: message, timestamp: new Date().toISOString(), ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }), - ...(Array.isArray(normalizedContexts) && normalizedContexts.length > 0 && { - contexts: normalizedContexts, - }), + ...(Array.isArray(normalizedContexts) && + normalizedContexts.length > 0 && { + contexts: normalizedContexts, + }), ...(Array.isArray(normalizedContexts) && normalizedContexts.length > 0 && { contentBlocks: [ diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts index 8e6dab8f81..829a57a62e 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts @@ -264,7 +264,9 @@ async function executeManageCustomTool( workspaceId != null ? 
await db .delete(customTools) - .where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))) + .where( + and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)) + ) .returning({ id: customTools.id }) : [] diff --git a/apps/sim/stores/panel/copilot/store.ts b/apps/sim/stores/panel/copilot/store.ts index f767e1a5ba..44f17df101 100644 --- a/apps/sim/stores/panel/copilot/store.ts +++ b/apps/sim/stores/panel/copilot/store.ts @@ -408,7 +408,9 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel if (matches.length === 0) return selectedModel if (provider) { - const sameProvider = matches.find((m) => m.provider === provider || m.id.startsWith(`${provider}/`)) + const sameProvider = matches.find( + (m) => m.provider === provider || m.id.startsWith(`${provider}/`) + ) if (sameProvider) return sameProvider.id } From 7c4b9256bb53cc74cbb90e3f4df851be80e55d07 Mon Sep 17 00:00:00 2001 From: Waleed Date: Wed, 11 Feb 2026 14:33:30 -0800 Subject: [PATCH 07/14] feat(confluence): added list space labels, delete label, delete page prop (#3201) --- .../docs/content/docs/en/tools/confluence.mdx | 106 +++++++++++++ .../app/api/tools/confluence/labels/route.ts | 81 ++++++++++ .../tools/confluence/pages-by-label/route.ts | 103 +++++++++++++ .../tools/confluence/space-labels/route.ts | 98 ++++++++++++ apps/sim/blocks/blocks/confluence.ts | 85 ++++++++++- apps/sim/tools/confluence/delete_label.ts | 114 ++++++++++++++ .../tools/confluence/delete_page_property.ts | 105 +++++++++++++ .../tools/confluence/get_pages_by_label.ts | 143 ++++++++++++++++++ apps/sim/tools/confluence/index.ts | 8 + .../sim/tools/confluence/list_space_labels.ts | 134 ++++++++++++++++ apps/sim/tools/registry.ts | 8 + 11 files changed, 983 insertions(+), 2 deletions(-) create mode 100644 apps/sim/app/api/tools/confluence/pages-by-label/route.ts create mode 100644 apps/sim/app/api/tools/confluence/space-labels/route.ts create mode 100644 apps/sim/tools/confluence/delete_label.ts create mode 100644 apps/sim/tools/confluence/delete_page_property.ts create mode 100644 apps/sim/tools/confluence/get_pages_by_label.ts create mode 100644 apps/sim/tools/confluence/list_space_labels.ts diff --git a/apps/docs/content/docs/en/tools/confluence.mdx b/apps/docs/content/docs/en/tools/confluence.mdx index b8173f1358..7ee0f0e73e 100644 --- a/apps/docs/content/docs/en/tools/confluence.mdx +++ b/apps/docs/content/docs/en/tools/confluence.mdx @@ -399,6 +399,28 @@ Create a new custom property (metadata) on a Confluence page. | ↳ `authorId` | string | Account ID of the version author | | ↳ `createdAt` | string | ISO 8601 timestamp of version creation | +### `confluence_delete_page_property` + +Delete a content property from a Confluence page by its property ID. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | +| `pageId` | string | Yes | The ID of the page containing the property | +| `propertyId` | string | Yes | The ID of the property to delete | +| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ts` | string | ISO 8601 timestamp of the operation | +| `pageId` | string | ID of the page | +| `propertyId` | string | ID of the deleted property | +| `deleted` | boolean | Deletion status | + ### `confluence_search` Search for content across Confluence pages, blog posts, and other content. @@ -872,6 +894,90 @@ Add a label to a Confluence page for organization and categorization. | `labelName` | string | Name of the added label | | `labelId` | string | ID of the added label | +### `confluence_delete_label` + +Remove a label from a Confluence page. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | +| `pageId` | string | Yes | Confluence page ID to remove the label from | +| `labelName` | string | Yes | Name of the label to remove | +| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ts` | string | ISO 8601 timestamp of the operation | +| `pageId` | string | Page ID the label was removed from | +| `labelName` | string | Name of the removed label | +| `deleted` | boolean | Deletion status | + +### `confluence_get_pages_by_label` + +Retrieve all pages that have a specific label applied. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | +| `labelId` | string | Yes | The ID of the label to get pages for | +| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) | +| `cursor` | string | No | Pagination cursor from previous response | +| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ts` | string | ISO 8601 timestamp of the operation | +| `labelId` | string | ID of the label | +| `pages` | array | Array of pages with this label | +| ↳ `id` | string | Unique page identifier | +| ↳ `title` | string | Page title | +| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) | +| ↳ `spaceId` | string | ID of the space containing the page | +| ↳ `parentId` | string | ID of the parent page \(null if top-level\) | +| ↳ `authorId` | string | Account ID of the page author | +| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created | +| ↳ `version` | object | Page version information | +| ↳ `number` | number | Version number | +| ↳ `message` | string | Version message | +| ↳ `minorEdit` | boolean | Whether this is a minor edit | +| ↳ `authorId` | string | Account ID of the version author | +| ↳ `createdAt` | string | ISO 8601 timestamp of version creation | +| `nextCursor` | string | Cursor for fetching the next page of results | + +### `confluence_list_space_labels` + +List all labels associated with a Confluence space. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | +| `spaceId` | string | Yes | The ID of the Confluence space to list labels from | +| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) | +| `cursor` | string | No | Pagination cursor from previous response | +| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ts` | string | ISO 8601 timestamp of the operation | +| `spaceId` | string | ID of the space | +| `labels` | array | Array of labels on the space | +| ↳ `id` | string | Unique label identifier | +| ↳ `name` | string | Label name | +| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) | +| `nextCursor` | string | Cursor for fetching the next page of results | + ### `confluence_get_space` Get details about a specific Confluence space. diff --git a/apps/sim/app/api/tools/confluence/labels/route.ts b/apps/sim/app/api/tools/confluence/labels/route.ts index ac5eb176a9..133267f950 100644 --- a/apps/sim/app/api/tools/confluence/labels/route.ts +++ b/apps/sim/app/api/tools/confluence/labels/route.ts @@ -191,3 +191,84 @@ export async function GET(request: NextRequest) { ) } } + +// Delete a label from a page +export async function DELETE(request: NextRequest) { + try { + const auth = await checkSessionOrInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const { + domain, + accessToken, + cloudId: providedCloudId, + pageId, + labelName, + } = await request.json() + + if (!domain) { + return NextResponse.json({ error: 'Domain is required' }, { status: 400 }) + } + + if (!accessToken) { + return NextResponse.json({ error: 'Access token is required' }, { status: 400 }) + } + + if (!pageId) { + return NextResponse.json({ error: 'Page ID is required' }, { status: 400 }) + } + + if (!labelName) { + return NextResponse.json({ error: 'Label name is required' }, { status: 400 }) + } + + const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255) + if (!pageIdValidation.isValid) { + return NextResponse.json({ error: pageIdValidation.error }, { status: 400 }) + } + + const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken)) + + const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId') + if (!cloudIdValidation.isValid) { + return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 }) + } + + const encodedLabel = encodeURIComponent(labelName.trim()) + const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label/${encodedLabel}` + + const response = await fetch(url, { + method: 'DELETE', + headers: { + Accept: 'application/json', + Authorization: `Bearer ${accessToken}`, + }, + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => null) + logger.error('Confluence API error response:', { + status: response.status, + statusText: response.statusText, + error: JSON.stringify(errorData, null, 2), + }) + const errorMessage = + errorData?.message || `Failed to delete Confluence label (${response.status})` + return NextResponse.json({ error: errorMessage }, { status: response.status }) + } + + return NextResponse.json({ + pageId, 
+ labelName, + deleted: true, + }) + } catch (error) { + logger.error('Error deleting Confluence label:', error) + return NextResponse.json( + { error: (error as Error).message || 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/confluence/pages-by-label/route.ts b/apps/sim/app/api/tools/confluence/pages-by-label/route.ts new file mode 100644 index 0000000000..bef6226169 --- /dev/null +++ b/apps/sim/app/api/tools/confluence/pages-by-label/route.ts @@ -0,0 +1,103 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation' +import { getConfluenceCloudId } from '@/tools/confluence/utils' + +const logger = createLogger('ConfluencePagesByLabelAPI') + +export const dynamic = 'force-dynamic' + +export async function GET(request: NextRequest) { + try { + const auth = await checkSessionOrInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const domain = searchParams.get('domain') + const accessToken = searchParams.get('accessToken') + const labelId = searchParams.get('labelId') + const providedCloudId = searchParams.get('cloudId') + const limit = searchParams.get('limit') || '50' + const cursor = searchParams.get('cursor') + + if (!domain) { + return NextResponse.json({ error: 'Domain is required' }, { status: 400 }) + } + + if (!accessToken) { + return NextResponse.json({ error: 'Access token is required' }, { status: 400 }) + } + + if (!labelId) { + return NextResponse.json({ error: 'Label ID is required' }, { status: 400 }) + } + + const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255) + if (!labelIdValidation.isValid) { + return NextResponse.json({ error: labelIdValidation.error }, { status: 400 }) + } + + const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken)) + + const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId') + if (!cloudIdValidation.isValid) { + return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 }) + } + + const queryParams = new URLSearchParams() + queryParams.append('limit', String(Math.min(Number(limit), 250))) + if (cursor) { + queryParams.append('cursor', cursor) + } + const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}` + + const response = await fetch(url, { + method: 'GET', + headers: { + Accept: 'application/json', + Authorization: `Bearer ${accessToken}`, + }, + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => null) + logger.error('Confluence API error response:', { + status: response.status, + statusText: response.statusText, + error: JSON.stringify(errorData, null, 2), + }) + const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})` + return NextResponse.json({ error: errorMessage }, { status: response.status }) + } + + const data = await response.json() + + const pages = (data.results || []).map((page: any) => ({ + id: page.id, + title: page.title, + status: page.status ?? null, + spaceId: page.spaceId ?? null, + parentId: page.parentId ?? null, + authorId: page.authorId ?? null, + createdAt: page.createdAt ?? null, + version: page.version ?? 
null, + })) + + return NextResponse.json({ + pages, + labelId, + nextCursor: data._links?.next + ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor') + : null, + }) + } catch (error) { + logger.error('Error getting pages by label:', error) + return NextResponse.json( + { error: (error as Error).message || 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/confluence/space-labels/route.ts b/apps/sim/app/api/tools/confluence/space-labels/route.ts new file mode 100644 index 0000000000..be28cd2c92 --- /dev/null +++ b/apps/sim/app/api/tools/confluence/space-labels/route.ts @@ -0,0 +1,98 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation' +import { getConfluenceCloudId } from '@/tools/confluence/utils' + +const logger = createLogger('ConfluenceSpaceLabelsAPI') + +export const dynamic = 'force-dynamic' + +export async function GET(request: NextRequest) { + try { + const auth = await checkSessionOrInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const domain = searchParams.get('domain') + const accessToken = searchParams.get('accessToken') + const spaceId = searchParams.get('spaceId') + const providedCloudId = searchParams.get('cloudId') + const limit = searchParams.get('limit') || '25' + const cursor = searchParams.get('cursor') + + if (!domain) { + return NextResponse.json({ error: 'Domain is required' }, { status: 400 }) + } + + if (!accessToken) { + return NextResponse.json({ error: 'Access token is required' }, { status: 400 }) + } + + if (!spaceId) { + return NextResponse.json({ error: 'Space ID is required' }, { status: 400 }) + } + + const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255) + if (!spaceIdValidation.isValid) { + return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 }) + } + + const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken)) + + const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId') + if (!cloudIdValidation.isValid) { + return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 }) + } + + const queryParams = new URLSearchParams() + queryParams.append('limit', String(Math.min(Number(limit), 250))) + if (cursor) { + queryParams.append('cursor', cursor) + } + const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}` + + const response = await fetch(url, { + method: 'GET', + headers: { + Accept: 'application/json', + Authorization: `Bearer ${accessToken}`, + }, + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => null) + logger.error('Confluence API error response:', { + status: response.status, + statusText: response.statusText, + error: JSON.stringify(errorData, null, 2), + }) + const errorMessage = errorData?.message || `Failed to list space labels (${response.status})` + return NextResponse.json({ error: errorMessage }, { status: response.status }) + } + + const data = await response.json() + + const labels = (data.results || []).map((label: any) => ({ + id: label.id, + name: label.name, + prefix: label.prefix || 'global', + })) + + return 
NextResponse.json({ + labels, + spaceId, + nextCursor: data._links?.next + ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor') + : null, + }) + } catch (error) { + logger.error('Error listing space labels:', error) + return NextResponse.json( + { error: (error as Error).message || 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/blocks/blocks/confluence.ts b/apps/sim/blocks/blocks/confluence.ts index 970945c0c3..7b7968843b 100644 --- a/apps/sim/blocks/blocks/confluence.ts +++ b/apps/sim/blocks/blocks/confluence.ts @@ -394,6 +394,7 @@ export const ConfluenceV2Block: BlockConfig = { // Page Property Operations { label: 'List Page Properties', id: 'list_page_properties' }, { label: 'Create Page Property', id: 'create_page_property' }, + { label: 'Delete Page Property', id: 'delete_page_property' }, // Search Operations { label: 'Search Content', id: 'search' }, { label: 'Search in Space', id: 'search_in_space' }, @@ -414,6 +415,9 @@ export const ConfluenceV2Block: BlockConfig = { // Label Operations { label: 'List Labels', id: 'list_labels' }, { label: 'Add Label', id: 'add_label' }, + { label: 'Delete Label', id: 'delete_label' }, + { label: 'Get Pages by Label', id: 'get_pages_by_label' }, + { label: 'List Space Labels', id: 'list_space_labels' }, // Space Operations { label: 'Get Space', id: 'get_space' }, { label: 'List Spaces', id: 'list_spaces' }, @@ -485,6 +489,8 @@ export const ConfluenceV2Block: BlockConfig = { 'search_in_space', 'get_space', 'list_spaces', + 'get_pages_by_label', + 'list_space_labels', ], not: true, }, @@ -500,6 +506,8 @@ export const ConfluenceV2Block: BlockConfig = { 'list_labels', 'upload_attachment', 'add_label', + 'delete_label', + 'delete_page_property', 'get_page_children', 'get_page_ancestors', 'list_page_versions', @@ -527,6 +535,8 @@ export const ConfluenceV2Block: BlockConfig = { 'search_in_space', 'get_space', 'list_spaces', + 'get_pages_by_label', + 'list_space_labels', ], not: true, }, @@ -542,6 +552,8 @@ export const ConfluenceV2Block: BlockConfig = { 'list_labels', 'upload_attachment', 'add_label', + 'delete_label', + 'delete_page_property', 'get_page_children', 'get_page_ancestors', 'list_page_versions', @@ -566,6 +578,7 @@ export const ConfluenceV2Block: BlockConfig = { 'search_in_space', 'create_blogpost', 'list_blogposts_in_space', + 'list_space_labels', ], }, }, @@ -601,6 +614,14 @@ export const ConfluenceV2Block: BlockConfig = { required: true, condition: { field: 'operation', value: 'create_page_property' }, }, + { + id: 'propertyId', + title: 'Property ID', + type: 'short-input', + placeholder: 'Enter property ID to delete', + required: true, + condition: { field: 'operation', value: 'delete_page_property' }, + }, { id: 'title', title: 'Title', @@ -694,7 +715,7 @@ export const ConfluenceV2Block: BlockConfig = { type: 'short-input', placeholder: 'Enter label name', required: true, - condition: { field: 'operation', value: 'add_label' }, + condition: { field: 'operation', value: ['add_label', 'delete_label'] }, }, { id: 'labelPrefix', @@ -709,6 +730,14 @@ export const ConfluenceV2Block: BlockConfig = { value: () => 'global', condition: { field: 'operation', value: 'add_label' }, }, + { + id: 'labelId', + title: 'Label ID', + type: 'short-input', + placeholder: 'Enter label ID', + required: true, + condition: { field: 'operation', value: 'get_pages_by_label' }, + }, { id: 'blogPostStatus', title: 'Status', @@ -759,6 +788,8 @@ export const ConfluenceV2Block: BlockConfig = { 
'list_page_versions', 'list_page_properties', 'list_labels', + 'get_pages_by_label', + 'list_space_labels', ], }, }, @@ -780,6 +811,8 @@ export const ConfluenceV2Block: BlockConfig = { 'list_page_versions', 'list_page_properties', 'list_labels', + 'get_pages_by_label', + 'list_space_labels', ], }, }, @@ -800,6 +833,7 @@ export const ConfluenceV2Block: BlockConfig = { // Property Tools 'confluence_list_page_properties', 'confluence_create_page_property', + 'confluence_delete_page_property', // Search Tools 'confluence_search', 'confluence_search_in_space', @@ -820,6 +854,9 @@ export const ConfluenceV2Block: BlockConfig = { // Label Tools 'confluence_list_labels', 'confluence_add_label', + 'confluence_delete_label', + 'confluence_get_pages_by_label', + 'confluence_list_space_labels', // Space Tools 'confluence_get_space', 'confluence_list_spaces', @@ -852,6 +889,8 @@ export const ConfluenceV2Block: BlockConfig = { return 'confluence_list_page_properties' case 'create_page_property': return 'confluence_create_page_property' + case 'delete_page_property': + return 'confluence_delete_page_property' // Search Operations case 'search': return 'confluence_search' @@ -887,6 +926,12 @@ export const ConfluenceV2Block: BlockConfig = { return 'confluence_list_labels' case 'add_label': return 'confluence_add_label' + case 'delete_label': + return 'confluence_delete_label' + case 'get_pages_by_label': + return 'confluence_get_pages_by_label' + case 'list_space_labels': + return 'confluence_list_space_labels' // Space Operations case 'get_space': return 'confluence_get_space' @@ -908,7 +953,9 @@ export const ConfluenceV2Block: BlockConfig = { versionNumber, propertyKey, propertyValue, + propertyId, labelPrefix, + labelId, blogPostStatus, purge, bodyFormat, @@ -959,7 +1006,9 @@ export const ConfluenceV2Block: BlockConfig = { } } - // Operations that support cursor pagination + // Operations that support generic cursor pagination. + // get_pages_by_label and list_space_labels have dedicated handlers + // below that pass cursor along with their required params (labelId, spaceId). 
const supportsCursor = [ 'list_attachments', 'list_spaces', @@ -996,6 +1045,35 @@ export const ConfluenceV2Block: BlockConfig = { } } + if (operation === 'delete_page_property') { + return { + credential, + pageId: effectivePageId, + operation, + propertyId, + ...rest, + } + } + + if (operation === 'get_pages_by_label') { + return { + credential, + operation, + labelId, + cursor: cursor || undefined, + ...rest, + } + } + + if (operation === 'list_space_labels') { + return { + credential, + operation, + cursor: cursor || undefined, + ...rest, + } + } + if (operation === 'upload_attachment') { const normalizedFile = normalizeFileInput(attachmentFile, { single: true }) if (!normalizedFile) { @@ -1044,7 +1122,9 @@ export const ConfluenceV2Block: BlockConfig = { attachmentFileName: { type: 'string', description: 'Custom file name for attachment' }, attachmentComment: { type: 'string', description: 'Comment for the attachment' }, labelName: { type: 'string', description: 'Label name' }, + labelId: { type: 'string', description: 'Label identifier' }, labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' }, + propertyId: { type: 'string', description: 'Property identifier' }, blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' }, purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' }, bodyFormat: { type: 'string', description: 'Body format for comments' }, @@ -1080,6 +1160,7 @@ export const ConfluenceV2Block: BlockConfig = { // Label Results labels: { type: 'array', description: 'List of labels' }, labelName: { type: 'string', description: 'Label name' }, + labelId: { type: 'string', description: 'Label identifier' }, // Space Results spaces: { type: 'array', description: 'List of spaces' }, spaceId: { type: 'string', description: 'Space identifier' }, diff --git a/apps/sim/tools/confluence/delete_label.ts b/apps/sim/tools/confluence/delete_label.ts new file mode 100644 index 0000000000..2f92766fc6 --- /dev/null +++ b/apps/sim/tools/confluence/delete_label.ts @@ -0,0 +1,114 @@ +import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types' +import type { ToolConfig } from '@/tools/types' + +export interface ConfluenceDeleteLabelParams { + accessToken: string + domain: string + pageId: string + labelName: string + cloudId?: string +} + +export interface ConfluenceDeleteLabelResponse { + success: boolean + output: { + ts: string + pageId: string + labelName: string + deleted: boolean + } +} + +export const confluenceDeleteLabelTool: ToolConfig< + ConfluenceDeleteLabelParams, + ConfluenceDeleteLabelResponse +> = { + id: 'confluence_delete_label', + name: 'Confluence Delete Label', + description: 'Remove a label from a Confluence page.', + version: '1.0.0', + + oauth: { + required: true, + provider: 'confluence', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'OAuth access token for Confluence', + }, + domain: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)', + }, + pageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Confluence page ID to remove the label from', + }, + labelName: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Name of the label to remove', + }, + cloudId: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Confluence Cloud ID for the 
instance. If not provided, it will be fetched using the domain.', + }, + }, + + request: { + url: () => '/api/tools/confluence/labels', + method: 'DELETE', + headers: (params: ConfluenceDeleteLabelParams) => ({ + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, + }), + body: (params: ConfluenceDeleteLabelParams) => ({ + domain: params.domain, + accessToken: params.accessToken, + pageId: params.pageId?.trim(), + labelName: params.labelName?.trim(), + cloudId: params.cloudId, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + return { + success: true, + output: { + ts: new Date().toISOString(), + pageId: data.pageId ?? '', + labelName: data.labelName ?? '', + deleted: true, + }, + } + }, + + outputs: { + ts: TIMESTAMP_OUTPUT, + pageId: { + type: 'string', + description: 'Page ID the label was removed from', + }, + labelName: { + type: 'string', + description: 'Name of the removed label', + }, + deleted: { + type: 'boolean', + description: 'Deletion status', + }, + }, +} diff --git a/apps/sim/tools/confluence/delete_page_property.ts b/apps/sim/tools/confluence/delete_page_property.ts new file mode 100644 index 0000000000..d7b6c5fbb4 --- /dev/null +++ b/apps/sim/tools/confluence/delete_page_property.ts @@ -0,0 +1,105 @@ +import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types' +import type { ToolConfig } from '@/tools/types' + +export interface ConfluenceDeletePagePropertyParams { + accessToken: string + domain: string + pageId: string + propertyId: string + cloudId?: string +} + +export interface ConfluenceDeletePagePropertyResponse { + success: boolean + output: { + ts: string + pageId: string + propertyId: string + deleted: boolean + } +} + +export const confluenceDeletePagePropertyTool: ToolConfig< + ConfluenceDeletePagePropertyParams, + ConfluenceDeletePagePropertyResponse +> = { + id: 'confluence_delete_page_property', + name: 'Confluence Delete Page Property', + description: 'Delete a content property from a Confluence page by its property ID.', + version: '1.0.0', + + oauth: { + required: true, + provider: 'confluence', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'OAuth access token for Confluence', + }, + domain: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)', + }, + pageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The ID of the page containing the property', + }, + propertyId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The ID of the property to delete', + }, + cloudId: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Confluence Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', + }, + }, + + request: { + url: () => '/api/tools/confluence/page-properties', + method: 'DELETE', + headers: (params: ConfluenceDeletePagePropertyParams) => ({ + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, + }), + body: (params: ConfluenceDeletePagePropertyParams) => ({ + domain: params.domain, + accessToken: params.accessToken, + pageId: params.pageId?.trim(), + propertyId: params.propertyId?.trim(), + cloudId: params.cloudId, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + return { + success: true, + output: { + ts: new Date().toISOString(), + pageId: data.pageId ?? '', + propertyId: data.propertyId ?? '', + deleted: true, + }, + } + }, + + outputs: { + ts: TIMESTAMP_OUTPUT, + pageId: { type: 'string', description: 'ID of the page' }, + propertyId: { type: 'string', description: 'ID of the deleted property' }, + deleted: { type: 'boolean', description: 'Deletion status' }, + }, +} diff --git a/apps/sim/tools/confluence/get_pages_by_label.ts b/apps/sim/tools/confluence/get_pages_by_label.ts new file mode 100644 index 0000000000..af67210a0b --- /dev/null +++ b/apps/sim/tools/confluence/get_pages_by_label.ts @@ -0,0 +1,143 @@ +import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types' +import type { ToolConfig } from '@/tools/types' + +export interface ConfluenceGetPagesByLabelParams { + accessToken: string + domain: string + labelId: string + limit?: number + cursor?: string + cloudId?: string +} + +export interface ConfluenceGetPagesByLabelResponse { + success: boolean + output: { + ts: string + labelId: string + pages: Array<{ + id: string + title: string + status: string | null + spaceId: string | null + parentId: string | null + authorId: string | null + createdAt: string | null + version: { + number: number + message?: string + createdAt?: string + } | null + }> + nextCursor: string | null + } +} + +export const confluenceGetPagesByLabelTool: ToolConfig< + ConfluenceGetPagesByLabelParams, + ConfluenceGetPagesByLabelResponse +> = { + id: 'confluence_get_pages_by_label', + name: 'Confluence Get Pages by Label', + description: 'Retrieve all pages that have a specific label applied.', + version: '1.0.0', + + oauth: { + required: true, + provider: 'confluence', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'OAuth access token for Confluence', + }, + domain: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)', + }, + labelId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The ID of the label to get pages for', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of pages to return (default: 50, max: 250)', + }, + cursor: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Pagination cursor from previous response', + }, + cloudId: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Confluence Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', + }, + }, + + request: { + url: (params: ConfluenceGetPagesByLabelParams) => { + const query = new URLSearchParams({ + domain: params.domain, + accessToken: params.accessToken, + labelId: params.labelId, + limit: String(params.limit || 50), + }) + if (params.cursor) { + query.set('cursor', params.cursor) + } + if (params.cloudId) { + query.set('cloudId', params.cloudId) + } + return `/api/tools/confluence/pages-by-label?${query.toString()}` + }, + method: 'GET', + headers: (params: ConfluenceGetPagesByLabelParams) => ({ + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + return { + success: true, + output: { + ts: new Date().toISOString(), + labelId: data.labelId ?? '', + pages: data.pages ?? [], + nextCursor: data.nextCursor ?? null, + }, + } + }, + + outputs: { + ts: TIMESTAMP_OUTPUT, + labelId: { type: 'string', description: 'ID of the label' }, + pages: { + type: 'array', + description: 'Array of pages with this label', + items: { + type: 'object', + properties: PAGE_ITEM_PROPERTIES, + }, + }, + nextCursor: { + type: 'string', + description: 'Cursor for fetching the next page of results', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/confluence/index.ts b/apps/sim/tools/confluence/index.ts index d78645b15a..2494f32d04 100644 --- a/apps/sim/tools/confluence/index.ts +++ b/apps/sim/tools/confluence/index.ts @@ -5,11 +5,14 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page' import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property' import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment' import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment' +import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label' import { confluenceDeletePageTool } from '@/tools/confluence/delete_page' +import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property' import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost' import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors' import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children' import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version' +import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label' import { confluenceGetSpaceTool } from '@/tools/confluence/get_space' import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments' import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts' @@ -19,6 +22,7 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels' import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties' import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions' import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space' +import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels' import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces' import { confluenceRetrieveTool } from '@/tools/confluence/retrieve' import { confluenceSearchTool } from '@/tools/confluence/search' @@ -78,6 +82,7 @@ export { // Page Properties Tools confluenceListPagePropertiesTool, confluenceCreatePagePropertyTool, + 
confluenceDeletePagePropertyTool, // Blog Post Tools confluenceListBlogPostsTool, confluenceGetBlogPostTool, @@ -98,6 +103,9 @@ export { // Label Tools confluenceListLabelsTool, confluenceAddLabelTool, + confluenceDeleteLabelTool, + confluenceGetPagesByLabelTool, + confluenceListSpaceLabelsTool, // Space Tools confluenceGetSpaceTool, confluenceListSpacesTool, diff --git a/apps/sim/tools/confluence/list_space_labels.ts b/apps/sim/tools/confluence/list_space_labels.ts new file mode 100644 index 0000000000..d30990d06e --- /dev/null +++ b/apps/sim/tools/confluence/list_space_labels.ts @@ -0,0 +1,134 @@ +import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types' +import type { ToolConfig } from '@/tools/types' + +export interface ConfluenceListSpaceLabelsParams { + accessToken: string + domain: string + spaceId: string + limit?: number + cursor?: string + cloudId?: string +} + +export interface ConfluenceListSpaceLabelsResponse { + success: boolean + output: { + ts: string + spaceId: string + labels: Array<{ + id: string + name: string + prefix: string + }> + nextCursor: string | null + } +} + +export const confluenceListSpaceLabelsTool: ToolConfig< + ConfluenceListSpaceLabelsParams, + ConfluenceListSpaceLabelsResponse +> = { + id: 'confluence_list_space_labels', + name: 'Confluence List Space Labels', + description: 'List all labels associated with a Confluence space.', + version: '1.0.0', + + oauth: { + required: true, + provider: 'confluence', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'OAuth access token for Confluence', + }, + domain: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)', + }, + spaceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The ID of the Confluence space to list labels from', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of labels to return (default: 25, max: 250)', + }, + cursor: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Pagination cursor from previous response', + }, + cloudId: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.', + }, + }, + + request: { + url: (params: ConfluenceListSpaceLabelsParams) => { + const query = new URLSearchParams({ + domain: params.domain, + accessToken: params.accessToken, + spaceId: params.spaceId, + limit: String(params.limit || 25), + }) + if (params.cursor) { + query.set('cursor', params.cursor) + } + if (params.cloudId) { + query.set('cloudId', params.cloudId) + } + return `/api/tools/confluence/space-labels?${query.toString()}` + }, + method: 'GET', + headers: (params: ConfluenceListSpaceLabelsParams) => ({ + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + return { + success: true, + output: { + ts: new Date().toISOString(), + spaceId: data.spaceId ?? '', + labels: data.labels ?? [], + nextCursor: data.nextCursor ?? 
null, + }, + } + }, + + outputs: { + ts: TIMESTAMP_OUTPUT, + spaceId: { type: 'string', description: 'ID of the space' }, + labels: { + type: 'array', + description: 'Array of labels on the space', + items: { + type: 'object', + properties: LABEL_ITEM_PROPERTIES, + }, + }, + nextCursor: { + type: 'string', + description: 'Cursor for fetching the next page of results', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 7411c53c57..52506d7448 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -118,10 +118,13 @@ import { confluenceCreatePageTool, confluenceDeleteAttachmentTool, confluenceDeleteCommentTool, + confluenceDeleteLabelTool, + confluenceDeletePagePropertyTool, confluenceDeletePageTool, confluenceGetBlogPostTool, confluenceGetPageAncestorsTool, confluenceGetPageChildrenTool, + confluenceGetPagesByLabelTool, confluenceGetPageVersionTool, confluenceGetSpaceTool, confluenceListAttachmentsTool, @@ -132,6 +135,7 @@ import { confluenceListPagePropertiesTool, confluenceListPagesInSpaceTool, confluenceListPageVersionsTool, + confluenceListSpaceLabelsTool, confluenceListSpacesTool, confluenceRetrieveTool, confluenceSearchInSpaceTool, @@ -2667,6 +2671,10 @@ export const tools: Record = { confluence_delete_attachment: confluenceDeleteAttachmentTool, confluence_list_labels: confluenceListLabelsTool, confluence_add_label: confluenceAddLabelTool, + confluence_get_pages_by_label: confluenceGetPagesByLabelTool, + confluence_list_space_labels: confluenceListSpaceLabelsTool, + confluence_delete_label: confluenceDeleteLabelTool, + confluence_delete_page_property: confluenceDeletePagePropertyTool, confluence_get_space: confluenceGetSpaceTool, confluence_list_spaces: confluenceListSpacesTool, cursor_list_agents: cursorListAgentsTool, From 51bc5b54e888124f0731460bdbbe495cbc13627c Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 14:45:36 -0800 Subject: [PATCH 08/14] updated route --- apps/sim/app/api/tools/confluence/labels/route.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/sim/app/api/tools/confluence/labels/route.ts b/apps/sim/app/api/tools/confluence/labels/route.ts index 133267f950..7003f2f63b 100644 --- a/apps/sim/app/api/tools/confluence/labels/route.ts +++ b/apps/sim/app/api/tools/confluence/labels/route.ts @@ -236,8 +236,7 @@ export async function DELETE(request: NextRequest) { return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 }) } - const encodedLabel = encodeURIComponent(labelName.trim()) - const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label/${encodedLabel}` + const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodeURIComponent(labelName.trim())}` const response = await fetch(url, { method: 'DELETE', From 3bb271d17f1ebd167e41a9e866df13e242e01738 Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 14:51:36 -0800 Subject: [PATCH 09/14] ack comments --- .../w/[workflowId]/hooks/use-workflow-execution.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index 9df2754648..b359ff34b2 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ 
-1388,6 +1388,7 @@ export function useWorkflowExecution() { onExecutionCompleted: (data) => { if ( activeWorkflowId && + executionIdRef.current && useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== executionIdRef.current ) @@ -1459,6 +1460,7 @@ export function useWorkflowExecution() { onExecutionError: (data) => { if ( activeWorkflowId && + executionIdRef.current && useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== executionIdRef.current ) @@ -1492,6 +1494,7 @@ export function useWorkflowExecution() { onExecutionCancelled: (data) => { if ( activeWorkflowId && + executionIdRef.current && useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !== executionIdRef.current ) @@ -2196,6 +2199,7 @@ export function useWorkflowExecution() { }) .catch((error) => { logger.warn('Execution reconnection failed', { executionId, error }) + clearExecutionEntries(executionId) for (const entry of runningEntries.filter((e) => e.executionId === executionId)) { addConsole({ workflowId: entry.workflowId, From 61a6b1884ba183729dfef4563acb97d970c9392c Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 15:43:54 -0800 Subject: [PATCH 10/14] fix(execution): reset execution state in reconnection cleanup to unblock re-entry Co-Authored-By: Claude Opus 4.6 --- .../w/[workflowId]/hooks/use-workflow-execution.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index b359ff34b2..4bfa88ca9c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -2219,6 +2219,13 @@ export function useWorkflowExecution() { return () => { executionStream.cancel(reconnectWorkflowId) + // Reset execution state so the SPA guard doesn't block the next reconnection + // attempt when the user navigates back to this workflow. + // The cancel above causes an AbortError which is swallowed by + // isClientDisconnectError, so the .catch() block never fires. + setCurrentExecutionId(reconnectWorkflowId, null) + setIsExecuting(reconnectWorkflowId, false) + setActiveBlocks(reconnectWorkflowId, new Set()) } // eslint-disable-next-line react-hooks/exhaustive-deps }, [activeWorkflowId, hasHydrated]) From 7af4025f61b68f10e52cc7a5aba88dd3542042b4 Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 15:56:18 -0800 Subject: [PATCH 11/14] fix(execution): restore running entries when reconnection is interrupted by navigation Co-Authored-By: Claude Opus 4.6 --- .../hooks/use-workflow-execution.ts | 29 +++++++++++++++---- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index 4bfa88ca9c..036d0e41db 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -2137,10 +2137,17 @@ export function useWorkflowExecution() { includeStartConsoleEntry: true, }) + // Save original running entries so we can restore them if reconnection is interrupted. + // This ensures the next mount can retry reconnection. 
+ const originalEntries = runningEntries + .filter((e) => e.executionId === executionId) + .map((e) => ({ ...e })) + // Defer clearing old entries until the first reconnection event arrives. // This keeps hydrated entries visible during the network round-trip, // avoiding a flash of empty console. let cleared = false + let reconnectionComplete = false const clearOnce = () => { if (!cleared) { cleared = true @@ -2169,12 +2176,14 @@ export function useWorkflowExecution() { }, onExecutionCompleted: () => { clearOnce() + reconnectionComplete = true setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) }, onExecutionError: (data) => { clearOnce() + reconnectionComplete = true setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) @@ -2187,6 +2196,7 @@ export function useWorkflowExecution() { }, onExecutionCancelled: () => { clearOnce() + reconnectionComplete = true setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) @@ -2198,9 +2208,10 @@ export function useWorkflowExecution() { }, }) .catch((error) => { + reconnectionComplete = true logger.warn('Execution reconnection failed', { executionId, error }) clearExecutionEntries(executionId) - for (const entry of runningEntries.filter((e) => e.executionId === executionId)) { + for (const entry of originalEntries) { addConsole({ workflowId: entry.workflowId, blockId: entry.blockId, @@ -2219,10 +2230,18 @@ export function useWorkflowExecution() { return () => { executionStream.cancel(reconnectWorkflowId) - // Reset execution state so the SPA guard doesn't block the next reconnection - // attempt when the user navigates back to this workflow. - // The cancel above causes an AbortError which is swallowed by - // isClientDisconnectError, so the .catch() block never fires. + + // If reconnection was interrupted (clearOnce fired but no terminal event arrived), + // restore the original running entries so the next mount can retry. + // cancel() causes an AbortError which is swallowed by isClientDisconnectError, + // so the .catch() block never fires — we must handle cleanup here. 
+ if (cleared && !reconnectionComplete) { + clearExecutionEntries(executionId) + for (const entry of originalEntries) { + addConsole(entry) + } + } + setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) From 5b8dba01908d4bce4576a95f6507f9710432a47c Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 17:51:53 -0800 Subject: [PATCH 12/14] done --- .../app/api/workflows/[id]/execute/route.ts | 8 +- .../executions/[executionId]/stream/route.ts | 18 +- .../hooks/use-workflow-execution.ts | 202 +++++++++++------- apps/sim/hooks/use-execution-stream.ts | 73 ++++--- apps/sim/lib/execution/event-buffer.ts | 9 +- 5 files changed, 203 insertions(+), 107 deletions(-) diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index a3c77e7f29..984fecc15f 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -1034,7 +1034,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: }) finalMetaStatus = 'error' } finally { - await eventWriter.close() + try { + await eventWriter.close() + } catch (closeError) { + logger.warn(`[${requestId}] Failed to close event writer`, { + error: closeError instanceof Error ? closeError.message : String(closeError), + }) + } if (finalMetaStatus) { setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {}) } diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index bb55c62b4b..da4725b59c 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -13,6 +13,7 @@ import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' const logger = createLogger('ExecutionStreamReconnectAPI') const POLL_INTERVAL_MS = 500 +const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes function isTerminalStatus(status: ExecutionStreamStatus): boolean { return status === 'complete' || status === 'error' || status === 'cancelled' @@ -70,10 +71,13 @@ export async function GET( const encoder = new TextEncoder() + // Hoisted so cancel() can signal the polling loop to stop + let closed = false + const stream = new ReadableStream({ async start(controller) { let lastEventId = fromEventId - let closed = false + const pollDeadline = Date.now() + MAX_POLL_DURATION_MS const enqueue = (text: string) => { if (closed) return @@ -101,8 +105,8 @@ export async function GET( return } - // Poll for new events until execution completes - while (!closed) { + // Poll for new events until execution completes or deadline is reached + while (!closed && Date.now() < pollDeadline) { await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS)) if (closed) return @@ -127,6 +131,13 @@ export async function GET( return } } + + // Deadline reached — close gracefully + if (!closed) { + logger.warn('Reconnection stream poll deadline reached', { executionId }) + enqueue('data: [DONE]\n\n') + controller.close() + } } catch (error) { logger.error('Error in reconnection stream', { executionId, @@ -140,6 +151,7 @@ export async function GET( } }, cancel() { + closed = true logger.info('Client disconnected from reconnection stream', { executionId }) }, }) diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index 036d0e41db..b3f0d9ac75 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -46,6 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store' const logger = createLogger('useWorkflowExecution') +/** + * Module-level Set tracking which workflows have an active reconnection effect. + * Prevents multiple hook instances (from different components) from starting + * concurrent reconnection streams for the same workflow during the same mount cycle. + */ +const activeReconnections = new Set() + // Debug state validation result interface DebugValidationResult { isValid: boolean @@ -1016,24 +1023,6 @@ export function useWorkflowExecution() { if (result.metadata.pendingBlocks) { setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks) } - } else if (result && 'success' in result) { - setExecutionResult(result) - // Reset execution state after successful non-debug execution - setIsExecuting(activeWorkflowId, false) - setIsDebugging(activeWorkflowId, false) - setActiveBlocks(activeWorkflowId, new Set()) - - if (isChatExecution) { - if (!result.metadata) { - result.metadata = { duration: 0, startTime: new Date().toISOString() } - } - ;(result.metadata as any).source = 'chat' - } - - // Invalidate subscription queries to update usage - setTimeout(() => { - queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }) - }, 1000) } return result } catch (error: any) { @@ -1455,6 +1444,19 @@ export function useWorkflowExecution() { }) } } + + // Reset execution state (unless in debug mode — debug session manages its own lifecycle) + const workflowExecState = activeWorkflowId + ? 
useExecutionStore.getState().getWorkflowExecution(activeWorkflowId) + : null + if (activeWorkflowId && !workflowExecState?.isDebugging) { + setExecutionResult(executionResult) + setIsExecuting(activeWorkflowId, false) + setActiveBlocks(activeWorkflowId, new Set()) + setTimeout(() => { + queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }) + }, 1000) + } }, onExecutionError: (data) => { @@ -1489,6 +1491,12 @@ export function useWorkflowExecution() { blockLogs: accumulatedBlockLogs, isPreExecutionError, }) + + if (activeWorkflowId) { + setIsExecuting(activeWorkflowId, false) + setIsDebugging(activeWorkflowId, false) + setActiveBlocks(activeWorkflowId, new Set()) + } }, onExecutionCancelled: (data) => { @@ -1509,6 +1517,12 @@ export function useWorkflowExecution() { executionId: executionIdRef.current, durationMs: data?.duration, }) + + if (activeWorkflowId) { + setIsExecuting(activeWorkflowId, false) + setIsDebugging(activeWorkflowId, false) + setActiveBlocks(activeWorkflowId, new Set()) + } }, }, }) @@ -1520,19 +1534,9 @@ export function useWorkflowExecution() { if (error.name === 'AbortError' || error.message?.includes('aborted')) { logger.info('Execution aborted by user') - // Reset execution state - if (activeWorkflowId) { - setIsExecuting(activeWorkflowId, false) - setActiveBlocks(activeWorkflowId, new Set()) - } - - // Return gracefully without error - return { - success: false, - output: {}, - metadata: { duration: 0 }, - logs: [], - } + // Return gracefully with accumulated data (no state reset — + // event callbacks or explicit cancel handle that) + return executionResult } logger.error('Server-side execution failed:', error) @@ -1777,36 +1781,29 @@ export function useWorkflowExecution() { // 1. Read + clear execution ID first so the isStaleExecution guard // blocks any further SSE callbacks from the old execution. const storedExecutionId = activeWorkflowId ? getCurrentExecutionId(activeWorkflowId) : null - if (activeWorkflowId) { - setCurrentExecutionId(activeWorkflowId, null) - } + if (!activeWorkflowId || !storedExecutionId) return + setCurrentExecutionId(activeWorkflowId, null) // 2. Send cancel signal to server via stored executionId - if (activeWorkflowId && storedExecutionId) { - fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, { - method: 'POST', - }).catch(() => {}) - } + fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, { + method: 'POST', + }).catch(() => {}) // 3. Abort local SSE stream (if still connected) - executionStream.cancel(activeWorkflowId ?? undefined) + executionStream.cancel(activeWorkflowId) // 4. Update terminal: mark running entries as cancelled + add "Execution Cancelled" entry - if (activeWorkflowId) { - handleExecutionCancelledConsole({ - workflowId: activeWorkflowId, - executionId: storedExecutionId ?? undefined, - }) - } + handleExecutionCancelledConsole({ + workflowId: activeWorkflowId, + executionId: storedExecutionId, + }) currentChatExecutionIdRef.current = null // 5. 
Reset remaining execution state - if (activeWorkflowId) { - setIsExecuting(activeWorkflowId, false) - setIsDebugging(activeWorkflowId, false) - setActiveBlocks(activeWorkflowId, new Set()) - } + setIsExecuting(activeWorkflowId, false) + setIsDebugging(activeWorkflowId, false) + setActiveBlocks(activeWorkflowId, new Set()) if (isDebugging) { resetDebugState() @@ -1980,6 +1977,10 @@ export function useWorkflowExecution() { } setLastExecutionSnapshot(workflowId, updatedSnapshot) } + + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setActiveBlocks(workflowId, new Set()) }, onExecutionError: (data) => { @@ -2004,6 +2005,10 @@ export function useWorkflowExecution() { durationMs: data.duration, blockLogs: accumulatedBlockLogs, }) + + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setActiveBlocks(workflowId, new Set()) }, onExecutionCancelled: (data) => { @@ -2012,6 +2017,10 @@ export function useWorkflowExecution() { executionId: executionIdRef.current, durationMs: data?.duration, }) + + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setActiveBlocks(workflowId, new Set()) }, }, }) @@ -2020,15 +2029,19 @@ export function useWorkflowExecution() { logger.error('Run-from-block failed:', error) } } finally { - setCurrentExecutionId(workflowId, null) - setIsExecuting(workflowId, false) - setActiveBlocks(workflowId, new Set()) + const currentId = getCurrentExecutionId(workflowId) + if (currentId !== null) { + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setActiveBlocks(workflowId, new Set()) + } } }, [ getLastExecutionSnapshot, setLastExecutionSnapshot, clearLastExecutionSnapshot, + getCurrentExecutionId, setCurrentExecutionId, setIsExecuting, setActiveBlocks, @@ -2067,36 +2080,50 @@ export function useWorkflowExecution() { 'manual', blockId ) - if (result && 'success' in result) { - setExecutionResult(result) - } } catch (error) { const errorResult = handleExecutionError(error, { executionId }) return errorResult } finally { - setIsExecuting(workflowId, false) - setIsDebugging(workflowId, false) - setActiveBlocks(workflowId, new Set()) + const currentId = getCurrentExecutionId(workflowId) + if (currentId !== null) { + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setIsDebugging(workflowId, false) + setActiveBlocks(workflowId, new Set()) + } } }, - [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks] + [ + activeWorkflowId, + getCurrentExecutionId, + setCurrentExecutionId, + setExecutionResult, + setIsExecuting, + setIsDebugging, + setActiveBlocks, + ] ) useEffect(() => { if (!activeWorkflowId || !hasHydrated) return - // Only attempt reconnection after a full page refresh. - // The execution store is NOT persisted, so isExecuting resets to false on refresh. - // During SPA navigation, isExecuting is still true → the SSE connection is alive. - const workflowExecState = useExecutionStore.getState().workflowExecutions.get(activeWorkflowId) - if (workflowExecState?.isExecuting) return - const entries = useTerminalConsoleStore.getState().entries const runningEntries = entries.filter( (e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId ) if (runningEntries.length === 0) return + // Coordination guard: only ONE instance of this hook should start reconnection. 
+ // Multiple components mount useWorkflowExecution simultaneously; the first + // instance to reach here claims the lock, subsequent instances bail out. + // This is separate from currentExecutionId (which is set by normal execution too). + if (activeReconnections.has(activeWorkflowId)) return + activeReconnections.add(activeWorkflowId) + + // Cancel any existing SSE stream for this workflow (SPA nav: old stream may be alive). + // Uses module-level shared AbortControllers, so this cancels streams from ANY instance. + executionStream.cancel(activeWorkflowId) + // Pick the most recent execution by startedAt timestamp. // Old zombie entries from previous executions may still have isRunning=true. const sorted = [...runningEntries].sort((a, b) => { @@ -2137,9 +2164,9 @@ export function useWorkflowExecution() { includeStartConsoleEntry: true, }) - // Save original running entries so we can restore them if reconnection is interrupted. - // This ensures the next mount can retry reconnection. - const originalEntries = runningEntries + // Save ALL entries for this execution so we can restore them if reconnection is interrupted. + // clearExecutionEntries removes completed AND running entries, so we must save both. + const originalEntries = entries .filter((e) => e.executionId === executionId) .map((e) => ({ ...e })) @@ -2175,15 +2202,35 @@ export function useWorkflowExecution() { handlers.onBlockError(data) }, onExecutionCompleted: () => { + // Stale check: if another path (e.g. handleCancelExecution) already + // cleared currentExecutionId, skip to avoid duplicate state updates. + const currentId = useExecutionStore + .getState() + .getCurrentExecutionId(reconnectWorkflowId) + if (currentId !== executionId) { + reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) + return + } clearOnce() reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) }, onExecutionError: (data) => { + const currentId = useExecutionStore + .getState() + .getCurrentExecutionId(reconnectWorkflowId) + if (currentId !== executionId) { + reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) + return + } clearOnce() reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) @@ -2195,8 +2242,17 @@ export function useWorkflowExecution() { }) }, onExecutionCancelled: () => { + const currentId = useExecutionStore + .getState() + .getCurrentExecutionId(reconnectWorkflowId) + if (currentId !== executionId) { + reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) + return + } clearOnce() reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) setCurrentExecutionId(reconnectWorkflowId, null) setIsExecuting(reconnectWorkflowId, false) setActiveBlocks(reconnectWorkflowId, new Set()) @@ -2209,6 +2265,7 @@ export function useWorkflowExecution() { }) .catch((error) => { reconnectionComplete = true + activeReconnections.delete(reconnectWorkflowId) logger.warn('Execution reconnection failed', { executionId, error }) clearExecutionEntries(executionId) for (const entry of originalEntries) { @@ -2230,6 +2287,7 @@ export function useWorkflowExecution() { return () => { executionStream.cancel(reconnectWorkflowId) + 
activeReconnections.delete(reconnectWorkflowId) // If reconnection was interrupted (clearOnce fired but no terminal event arrived), // restore the original running entries so the next mount can retry. @@ -2241,10 +2299,6 @@ export function useWorkflowExecution() { addConsole(entry) } } - - setCurrentExecutionId(reconnectWorkflowId, null) - setIsExecuting(reconnectWorkflowId, false) - setActiveBlocks(reconnectWorkflowId, new Set()) } // eslint-disable-next-line react-hooks/exhaustive-deps }, [activeWorkflowId, hasHydrated]) diff --git a/apps/sim/hooks/use-execution-stream.ts b/apps/sim/hooks/use-execution-stream.ts index d2fbf6fadb..fac63b013b 100644 --- a/apps/sim/hooks/use-execution-stream.ts +++ b/apps/sim/hooks/use-execution-stream.ts @@ -1,4 +1,4 @@ -import { useCallback, useRef } from 'react' +import { useCallback } from 'react' import { createLogger } from '@sim/logger' import type { BlockCompletedData, @@ -153,27 +153,31 @@ export interface ReconnectStreamOptions { callbacks?: ExecutionStreamCallbacks } +/** + * Module-level maps shared across all hook instances. + * This ensures ANY instance can cancel streams started by ANY other instance, + * which is critical for SPA navigation where the original hook instance unmounts + * but the SSE stream must be cancellable from the new instance. + */ +const sharedAbortControllers = new Map() +const sharedCurrentExecutions = new Map() + /** * Hook for executing workflows via server-side SSE streaming. * Supports concurrent executions via per-workflow AbortController maps. */ export function useExecutionStream() { - const abortControllersRef = useRef>(new Map()) - const currentExecutionsRef = useRef>( - new Map() - ) - const execute = useCallback(async (options: ExecuteStreamOptions) => { const { workflowId, callbacks = {}, onExecutionId, ...payload } = options - const existing = abortControllersRef.current.get(workflowId) + const existing = sharedAbortControllers.get(workflowId) if (existing) { existing.abort() } const abortController = new AbortController() - abortControllersRef.current.set(workflowId, abortController) - currentExecutionsRef.current.delete(workflowId) + sharedAbortControllers.set(workflowId, abortController) + sharedCurrentExecutions.delete(workflowId) try { const response = await fetch(`/api/workflows/${workflowId}/execute`, { @@ -200,7 +204,7 @@ export function useExecutionStream() { const serverExecutionId = response.headers.get('X-Execution-Id') if (serverExecutionId) { - currentExecutionsRef.current.set(workflowId, { workflowId, executionId: serverExecutionId }) + sharedCurrentExecutions.set(workflowId, { workflowId, executionId: serverExecutionId }) onExecutionId?.(serverExecutionId) } @@ -218,8 +222,12 @@ export function useExecutionStream() { }) throw error } finally { - abortControllersRef.current.delete(workflowId) - currentExecutionsRef.current.delete(workflowId) + // Only clean up if this is still our controller — a concurrent stream + // (e.g. reconnection) may have replaced it while we were running. 
+ if (sharedAbortControllers.get(workflowId) === abortController) { + sharedAbortControllers.delete(workflowId) + sharedCurrentExecutions.delete(workflowId) + } } }, []) @@ -233,14 +241,14 @@ export function useExecutionStream() { callbacks = {}, } = options - const existing = abortControllersRef.current.get(workflowId) + const existing = sharedAbortControllers.get(workflowId) if (existing) { existing.abort() } const abortController = new AbortController() - abortControllersRef.current.set(workflowId, abortController) - currentExecutionsRef.current.delete(workflowId) + sharedAbortControllers.set(workflowId, abortController) + sharedCurrentExecutions.delete(workflowId) try { const response = await fetch(`/api/workflows/${workflowId}/execute`, { @@ -276,7 +284,7 @@ export function useExecutionStream() { const serverExecutionId = response.headers.get('X-Execution-Id') if (serverExecutionId) { - currentExecutionsRef.current.set(workflowId, { workflowId, executionId: serverExecutionId }) + sharedCurrentExecutions.set(workflowId, { workflowId, executionId: serverExecutionId }) onExecutionId?.(serverExecutionId) } @@ -294,17 +302,24 @@ export function useExecutionStream() { }) throw error } finally { - abortControllersRef.current.delete(workflowId) - currentExecutionsRef.current.delete(workflowId) + if (sharedAbortControllers.get(workflowId) === abortController) { + sharedAbortControllers.delete(workflowId) + sharedCurrentExecutions.delete(workflowId) + } } }, []) const reconnect = useCallback(async (options: ReconnectStreamOptions) => { const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options + const existing = sharedAbortControllers.get(workflowId) + if (existing) { + existing.abort() + } + const abortController = new AbortController() - abortControllersRef.current.set(workflowId, abortController) - currentExecutionsRef.current.set(workflowId, { workflowId, executionId }) + sharedAbortControllers.set(workflowId, abortController) + sharedCurrentExecutions.set(workflowId, { workflowId, executionId }) try { const response = await fetch( @@ -320,25 +335,27 @@ export function useExecutionStream() { logger.error('Reconnection stream error:', error) throw error } finally { - abortControllersRef.current.delete(workflowId) - currentExecutionsRef.current.delete(workflowId) + if (sharedAbortControllers.get(workflowId) === abortController) { + sharedAbortControllers.delete(workflowId) + sharedCurrentExecutions.delete(workflowId) + } } }, []) const cancel = useCallback((workflowId?: string) => { if (workflowId) { - const controller = abortControllersRef.current.get(workflowId) + const controller = sharedAbortControllers.get(workflowId) if (controller) { controller.abort() - abortControllersRef.current.delete(workflowId) + sharedAbortControllers.delete(workflowId) } - currentExecutionsRef.current.delete(workflowId) + sharedCurrentExecutions.delete(workflowId) } else { - for (const [, controller] of abortControllersRef.current) { + for (const [, controller] of sharedAbortControllers) { controller.abort() } - abortControllersRef.current.clear() - currentExecutionsRef.current.clear() + sharedAbortControllers.clear() + sharedCurrentExecutions.clear() } }, []) diff --git a/apps/sim/lib/execution/event-buffer.ts b/apps/sim/lib/execution/event-buffer.ts index e79c29c05e..2323fc7bad 100644 --- a/apps/sim/lib/execution/event-buffer.ts +++ b/apps/sim/lib/execution/event-buffer.ts @@ -222,7 +222,14 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW 
clearTimeout(flushTimer) flushTimer = null } - await flush() + // Wait for any in-flight flush to complete + if (flushPromise) { + await flushPromise + } + // Drain any remaining events that accumulated during the wait + if (pending.length > 0) { + await doFlush() + } } return { write, flush, close } From 19ccefa0f412b73722066c321dd9f8a9131d60de Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 18:15:18 -0800 Subject: [PATCH 13/14] remove cast in ioredis types --- apps/sim/lib/execution/event-buffer.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/sim/lib/execution/event-buffer.ts b/apps/sim/lib/execution/event-buffer.ts index 2323fc7bad..95521674b5 100644 --- a/apps/sim/lib/execution/event-buffer.ts +++ b/apps/sim/lib/execution/event-buffer.ts @@ -170,7 +170,7 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW zaddArgs.push(entry.eventId, JSON.stringify(entry)) } const pipeline = redis.pipeline() - pipeline.zadd(key, ...(zaddArgs as [number, string])) + pipeline.zadd(key, ...zaddArgs) pipeline.expire(key, TTL_SECONDS) pipeline.expire(getSeqKey(executionId), TTL_SECONDS) pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1) From 0634d055886330e78d9906910010fc88dde87a3a Mon Sep 17 00:00:00 2001 From: waleed Date: Wed, 11 Feb 2026 18:59:41 -0800 Subject: [PATCH 14/14] ack PR comments --- .../[id]/deployments/[version]/route.ts | 2 +- .../app/api/workflows/[id]/execute/route.ts | 2 - .../executions/[executionId]/stream/route.ts | 6 -- .../components/version-description-modal.tsx | 4 +- .../hooks/use-workflow-execution.ts | 98 +++++-------------- apps/sim/hooks/queries/deployments.ts | 2 +- apps/sim/hooks/use-execution-stream.ts | 18 +--- apps/sim/lib/execution/event-buffer.ts | 16 ++- 8 files changed, 45 insertions(+), 103 deletions(-) diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts index 74194eba67..3af21e7581 100644 --- a/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts +++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/route.ts @@ -29,7 +29,7 @@ const patchBodySchema = z description: z .string() .trim() - .max(500, 'Description must be 500 characters or less') + .max(2000, 'Description must be 2000 characters or less') .nullable() .optional(), isActive: z.literal(true).optional(), // Set to true to activate this version diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 984fecc15f..b6ed6bd8b3 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -841,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const reader = streamingExec.stream.getReader() const decoder = new TextDecoder() - let chunkCount = 0 try { while (true) { const { done, value } = await reader.read() if (done) break - chunkCount++ const chunk = decoder.decode(value, { stream: true }) sendEvent({ type: 'stream:chunk', diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index da4725b59c..1f77ff391d 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -71,7 +71,6 @@ export async function GET( const encoder = new TextEncoder() - // Hoisted so cancel() can 
signal the polling loop to stop let closed = false const stream = new ReadableStream({ @@ -89,7 +88,6 @@ export async function GET( } try { - // Replay buffered events const events = await readExecutionEvents(executionId, lastEventId) for (const entry of events) { if (closed) return @@ -97,7 +95,6 @@ export async function GET( lastEventId = entry.eventId } - // Check if execution is already done const currentMeta = await getExecutionMeta(executionId) if (!currentMeta || isTerminalStatus(currentMeta.status)) { enqueue('data: [DONE]\n\n') @@ -105,7 +102,6 @@ export async function GET( return } - // Poll for new events until execution completes or deadline is reached while (!closed && Date.now() < pollDeadline) { await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS)) if (closed) return @@ -119,7 +115,6 @@ export async function GET( const polledMeta = await getExecutionMeta(executionId) if (!polledMeta || isTerminalStatus(polledMeta.status)) { - // One final read to catch any events flushed alongside the meta update const finalEvents = await readExecutionEvents(executionId, lastEventId) for (const entry of finalEvents) { if (closed) return @@ -132,7 +127,6 @@ export async function GET( } } - // Deadline reached — close gracefully if (!closed) { logger.warn('Reconnection stream poll deadline reached', { executionId }) enqueue('data: [DONE]\n\n') diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/components/version-description-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/components/version-description-modal.tsx index 3cf5106ea7..63606c56a0 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/components/version-description-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/components/version-description-modal.tsx @@ -113,7 +113,7 @@ export function VersionDescriptionModal({ className='min-h-[120px] resize-none' value={description} onChange={(e) => setDescription(e.target.value)} - maxLength={500} + maxLength={2000} disabled={isGenerating} />
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
           )}
           {!updateMutation.error && !generateMutation.error && ...}
-          {description.length}/500
+          {description.length}/2000
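Returning to the reconnection flow: the stream route earlier in this patch replays the buffered events it already has, then polls the event buffer until the execution reaches a terminal status or the poll deadline passes, while the client resumes from the last event id it processed. As a rough illustration of the client half of that contract, the sketch below parses the SSE wire format the route writes (data: <json> messages, terminated by data: [DONE]). It is a simplified stand-in: the names replayExecutionEvents and onEvent are illustrative only, and the real client path is useExecutionStream().reconnect with its callbacks, not hand-rolled parsing.

// Illustrative sketch only: resume an execution stream from a known event id.
// Assumes the route emits "data: <json>" SSE messages and ends with "data: [DONE]".
async function replayExecutionEvents(
  workflowId: string,
  executionId: string,
  fromEventId: number,
  onEvent: (event: unknown) => void,
  signal?: AbortSignal
): Promise<void> {
  const res = await fetch(
    `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
    { signal }
  )
  if (!res.ok || !res.body) throw new Error(`Reconnect failed: ${res.status}`)

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })

    // SSE messages are separated by a blank line.
    const messages = buffer.split('\n\n')
    buffer = messages.pop() ?? ''
    for (const message of messages) {
      const data = message
        .split('\n')
        .filter((line) => line.startsWith('data: '))
        .map((line) => line.slice(6))
        .join('\n')
      if (!data) continue
      if (data === '[DONE]') return
      onEvent(JSON.parse(data))
    }
  }
}

Passing the last event id the client has already applied as fromEventId is what makes a page refresh seamless: the server replays only what has not yet been seen.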
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index b3f0d9ac75..1088f8c87f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -53,7 +53,6 @@ const logger = createLogger('useWorkflowExecution') */ const activeReconnections = new Set() -// Debug state validation result interface DebugValidationResult { isValid: boolean error?: string @@ -923,10 +922,6 @@ export function useWorkflowExecution() { // Update block logs with actual stream completion times if (result.logs && streamCompletionTimes.size > 0) { - const streamCompletionEndTime = new Date( - Math.max(...Array.from(streamCompletionTimes.values())) - ).toISOString() - result.logs.forEach((log: BlockLog) => { if (streamCompletionTimes.has(log.blockId)) { const completionTime = streamCompletionTimes.get(log.blockId)! @@ -1008,7 +1003,6 @@ export function useWorkflowExecution() { return { success: true, stream } } - // For manual (non-chat) execution const manualExecutionId = uuidv4() try { const result = await executeWorkflow( @@ -1027,7 +1021,6 @@ export function useWorkflowExecution() { return result } catch (error: any) { const errorResult = handleExecutionError(error, { executionId: manualExecutionId }) - // Note: Error logs are already persisted server-side via execution-core.ts return errorResult } }, @@ -1445,7 +1438,6 @@ export function useWorkflowExecution() { } } - // Reset execution state (unless in debug mode — debug session manages its own lifecycle) const workflowExecState = activeWorkflowId ? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId) : null @@ -1529,13 +1521,8 @@ export function useWorkflowExecution() { return executionResult } catch (error: any) { - // Disconnect errors (AbortError, network error) are swallowed by useExecutionStream - // and won't reach here. Only genuine execution failures propagate. if (error.name === 'AbortError' || error.message?.includes('aborted')) { logger.info('Execution aborted by user') - - // Return gracefully with accumulated data (no state reset — - // event callbacks or explicit cancel handle that) return executionResult } @@ -1544,7 +1531,6 @@ export function useWorkflowExecution() { } } - // Fallback: should never reach here throw new Error('Server-side execution is required') } @@ -1776,31 +1762,24 @@ export function useWorkflowExecution() { * Handles cancelling the current workflow execution */ const handleCancelExecution = useCallback(() => { + if (!activeWorkflowId) return logger.info('Workflow execution cancellation requested') - // 1. Read + clear execution ID first so the isStaleExecution guard - // blocks any further SSE callbacks from the old execution. - const storedExecutionId = activeWorkflowId ? getCurrentExecutionId(activeWorkflowId) : null - if (!activeWorkflowId || !storedExecutionId) return - setCurrentExecutionId(activeWorkflowId, null) + const storedExecutionId = getCurrentExecutionId(activeWorkflowId) - // 2. 
Send cancel signal to server via stored executionId - fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, { - method: 'POST', - }).catch(() => {}) + if (storedExecutionId) { + setCurrentExecutionId(activeWorkflowId, null) + fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, { + method: 'POST', + }).catch(() => {}) + handleExecutionCancelledConsole({ + workflowId: activeWorkflowId, + executionId: storedExecutionId, + }) + } - // 3. Abort local SSE stream (if still connected) executionStream.cancel(activeWorkflowId) - - // 4. Update terminal: mark running entries as cancelled + add "Execution Cancelled" entry - handleExecutionCancelledConsole({ - workflowId: activeWorkflowId, - executionId: storedExecutionId, - }) - currentChatExecutionIdRef.current = null - - // 5. Reset remaining execution state setIsExecuting(activeWorkflowId, false) setIsDebugging(activeWorkflowId, false) setActiveBlocks(activeWorkflowId, new Set()) @@ -1953,7 +1932,6 @@ export function useWorkflowExecution() { onExecutionCompleted: (data) => { if (data.success) { - // Add the start block (trigger) to executed blocks executedBlockIds.add(blockId) const mergedBlockStates: Record = { @@ -2030,7 +2008,7 @@ export function useWorkflowExecution() { } } finally { const currentId = getCurrentExecutionId(workflowId) - if (currentId !== null) { + if (currentId === null || currentId === executionIdRef.current) { setCurrentExecutionId(workflowId, null) setIsExecuting(workflowId, false) setActiveBlocks(workflowId, new Set()) @@ -2072,30 +2050,19 @@ export function useWorkflowExecution() { const executionId = uuidv4() try { - const result = await executeWorkflow( - undefined, - undefined, - executionId, - undefined, - 'manual', - blockId - ) + await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId) } catch (error) { const errorResult = handleExecutionError(error, { executionId }) return errorResult } finally { - const currentId = getCurrentExecutionId(workflowId) - if (currentId !== null) { - setCurrentExecutionId(workflowId, null) - setIsExecuting(workflowId, false) - setIsDebugging(workflowId, false) - setActiveBlocks(workflowId, new Set()) - } + setCurrentExecutionId(workflowId, null) + setIsExecuting(workflowId, false) + setIsDebugging(workflowId, false) + setActiveBlocks(workflowId, new Set()) } }, [ activeWorkflowId, - getCurrentExecutionId, setCurrentExecutionId, setExecutionResult, setIsExecuting, @@ -2113,19 +2080,11 @@ export function useWorkflowExecution() { ) if (runningEntries.length === 0) return - // Coordination guard: only ONE instance of this hook should start reconnection. - // Multiple components mount useWorkflowExecution simultaneously; the first - // instance to reach here claims the lock, subsequent instances bail out. - // This is separate from currentExecutionId (which is set by normal execution too). if (activeReconnections.has(activeWorkflowId)) return activeReconnections.add(activeWorkflowId) - // Cancel any existing SSE stream for this workflow (SPA nav: old stream may be alive). - // Uses module-level shared AbortControllers, so this cancels streams from ANY instance. executionStream.cancel(activeWorkflowId) - // Pick the most recent execution by startedAt timestamp. - // Old zombie entries from previous executions may still have isRunning=true. const sorted = [...runningEntries].sort((a, b) => { const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0 const bTime = b.startedAt ? 
new Date(b.startedAt).getTime() : 0 @@ -2133,7 +2092,6 @@ export function useWorkflowExecution() { }) const executionId = sorted[0].executionId! - // Mark entries from older executions as stale const otherExecutionIds = new Set( sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!) ) @@ -2164,17 +2122,13 @@ export function useWorkflowExecution() { includeStartConsoleEntry: true, }) - // Save ALL entries for this execution so we can restore them if reconnection is interrupted. - // clearExecutionEntries removes completed AND running entries, so we must save both. const originalEntries = entries .filter((e) => e.executionId === executionId) .map((e) => ({ ...e })) - // Defer clearing old entries until the first reconnection event arrives. - // This keeps hydrated entries visible during the network round-trip, - // avoiding a flash of empty console. let cleared = false let reconnectionComplete = false + let cleanupRan = false const clearOnce = () => { if (!cleared) { cleared = true @@ -2202,8 +2156,6 @@ export function useWorkflowExecution() { handlers.onBlockError(data) }, onExecutionCompleted: () => { - // Stale check: if another path (e.g. handleCancelExecution) already - // cleared currentExecutionId, skip to avoid duplicate state updates. const currentId = useExecutionStore .getState() .getCurrentExecutionId(reconnectWorkflowId) @@ -2264,9 +2216,14 @@ export function useWorkflowExecution() { }, }) .catch((error) => { + logger.warn('Execution reconnection failed', { executionId, error }) + }) + .finally(() => { + if (reconnectionComplete || cleanupRan) return + const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId) + if (currentId !== executionId) return reconnectionComplete = true activeReconnections.delete(reconnectWorkflowId) - logger.warn('Execution reconnection failed', { executionId, error }) clearExecutionEntries(executionId) for (const entry of originalEntries) { addConsole({ @@ -2286,13 +2243,10 @@ export function useWorkflowExecution() { }) return () => { + cleanupRan = true executionStream.cancel(reconnectWorkflowId) activeReconnections.delete(reconnectWorkflowId) - // If reconnection was interrupted (clearOnce fired but no terminal event arrived), - // restore the original running entries so the next mount can retry. - // cancel() causes an AbortError which is swallowed by isClientDisconnectError, - // so the .catch() block never fires — we must handle cleanup here. if (cleared && !reconnectionComplete) { clearExecutionEntries(executionId) for (const entry of originalEntries) { diff --git a/apps/sim/hooks/queries/deployments.ts b/apps/sim/hooks/queries/deployments.ts index 894e1152c7..e2f5b5ffee 100644 --- a/apps/sim/hooks/queries/deployments.ts +++ b/apps/sim/hooks/queries/deployments.ts @@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables { const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform. -Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions. +Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions. 
Guidelines: - Use the specific values provided (credential names, channel names, model names) diff --git a/apps/sim/hooks/use-execution-stream.ts b/apps/sim/hooks/use-execution-stream.ts index fac63b013b..2ab98059fb 100644 --- a/apps/sim/hooks/use-execution-stream.ts +++ b/apps/sim/hooks/use-execution-stream.ts @@ -154,13 +154,12 @@ export interface ReconnectStreamOptions { } /** - * Module-level maps shared across all hook instances. - * This ensures ANY instance can cancel streams started by ANY other instance, + * Module-level map shared across all hook instances. + * Ensures ANY instance can cancel streams started by ANY other instance, * which is critical for SPA navigation where the original hook instance unmounts * but the SSE stream must be cancellable from the new instance. */ const sharedAbortControllers = new Map() -const sharedCurrentExecutions = new Map() /** * Hook for executing workflows via server-side SSE streaming. @@ -177,7 +176,6 @@ export function useExecutionStream() { const abortController = new AbortController() sharedAbortControllers.set(workflowId, abortController) - sharedCurrentExecutions.delete(workflowId) try { const response = await fetch(`/api/workflows/${workflowId}/execute`, { @@ -204,7 +202,6 @@ export function useExecutionStream() { const serverExecutionId = response.headers.get('X-Execution-Id') if (serverExecutionId) { - sharedCurrentExecutions.set(workflowId, { workflowId, executionId: serverExecutionId }) onExecutionId?.(serverExecutionId) } @@ -222,11 +219,8 @@ export function useExecutionStream() { }) throw error } finally { - // Only clean up if this is still our controller — a concurrent stream - // (e.g. reconnection) may have replaced it while we were running. if (sharedAbortControllers.get(workflowId) === abortController) { sharedAbortControllers.delete(workflowId) - sharedCurrentExecutions.delete(workflowId) } } }, []) @@ -248,7 +242,6 @@ export function useExecutionStream() { const abortController = new AbortController() sharedAbortControllers.set(workflowId, abortController) - sharedCurrentExecutions.delete(workflowId) try { const response = await fetch(`/api/workflows/${workflowId}/execute`, { @@ -284,7 +277,6 @@ export function useExecutionStream() { const serverExecutionId = response.headers.get('X-Execution-Id') if (serverExecutionId) { - sharedCurrentExecutions.set(workflowId, { workflowId, executionId: serverExecutionId }) onExecutionId?.(serverExecutionId) } @@ -304,7 +296,6 @@ export function useExecutionStream() { } finally { if (sharedAbortControllers.get(workflowId) === abortController) { sharedAbortControllers.delete(workflowId) - sharedCurrentExecutions.delete(workflowId) } } }, []) @@ -319,8 +310,6 @@ export function useExecutionStream() { const abortController = new AbortController() sharedAbortControllers.set(workflowId, abortController) - sharedCurrentExecutions.set(workflowId, { workflowId, executionId }) - try { const response = await fetch( `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`, @@ -337,7 +326,6 @@ export function useExecutionStream() { } finally { if (sharedAbortControllers.get(workflowId) === abortController) { sharedAbortControllers.delete(workflowId) - sharedCurrentExecutions.delete(workflowId) } } }, []) @@ -349,13 +337,11 @@ export function useExecutionStream() { controller.abort() sharedAbortControllers.delete(workflowId) } - sharedCurrentExecutions.delete(workflowId) } else { for (const [, controller] of sharedAbortControllers) { controller.abort() } 
sharedAbortControllers.clear() - sharedCurrentExecutions.clear() } }, []) diff --git a/apps/sim/lib/execution/event-buffer.ts b/apps/sim/lib/execution/event-buffer.ts index 95521674b5..4473a922f4 100644 --- a/apps/sim/lib/execution/event-buffer.ts +++ b/apps/sim/lib/execution/event-buffer.ts @@ -158,6 +158,7 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW let flushPromise: Promise | null = null let closed = false + const inflightWrites = new Set>() const doFlush = async () => { if (pending.length === 0) return @@ -200,7 +201,7 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW } } - const write = async (event: ExecutionEvent) => { + const writeCore = async (event: ExecutionEvent): Promise => { if (closed) return { eventId: 0, executionId, event } if (nextEventId === 0 || nextEventId > maxReservedId) { await reserveIds(1) @@ -216,17 +217,26 @@ export function createExecutionEventWriter(executionId: string): ExecutionEventW return entry } + const write = (event: ExecutionEvent): Promise => { + const p = writeCore(event) + inflightWrites.add(p) + const remove = () => inflightWrites.delete(p) + p.then(remove, remove) + return p + } + const close = async () => { closed = true if (flushTimer) { clearTimeout(flushTimer) flushTimer = null } - // Wait for any in-flight flush to complete + if (inflightWrites.size > 0) { + await Promise.allSettled(inflightWrites) + } if (flushPromise) { await flushPromise } - // Drain any remaining events that accumulated during the wait if (pending.length > 0) { await doFlush() }
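The close() ordering that patches 12 and 14 converge on in event-buffer.ts (wait for in-flight writes, then any in-flight flush, then drain whatever is still pending) is easier to see stripped of the Redis details. Below is a simplified sketch of just that pattern; persistBatch is a hypothetical stand-in for the pipelined ZADD, and event-id reservation plus the flush timer are omitted.

// Simplified sketch of the drain-on-close pattern used by the event writer.
// persistBatch is a stand-in for the real Redis pipeline write.
function createBufferedWriter<T>(persistBatch: (batch: T[]) => Promise<void>) {
  const pending: T[] = []
  const inflightWrites = new Set<Promise<void>>()
  let flushPromise: Promise<void> | null = null
  let closed = false

  const doFlush = async () => {
    // Keep draining: items pushed while a batch is persisting are picked up here.
    while (pending.length > 0) {
      const batch = pending.splice(0, pending.length)
      await persistBatch(batch)
    }
  }

  const flush = (): Promise<void> => {
    if (!flushPromise) {
      flushPromise = doFlush().finally(() => {
        flushPromise = null
      })
    }
    return flushPromise
  }

  const write = (item: T): Promise<void> => {
    if (closed) return Promise.resolve()
    pending.push(item)
    // Track in-flight flushes so close() can wait for them.
    const p = flush()
    inflightWrites.add(p)
    const remove = () => inflightWrites.delete(p)
    p.then(remove, remove)
    return p
  }

  const close = async () => {
    closed = true
    // 1. Let writes that already started settle.
    await Promise.allSettled(inflightWrites)
    // 2. Wait for any flush that is still running.
    if (flushPromise) await flushPromise
    // 3. Drain anything that accumulated while waiting.
    if (pending.length > 0) await doFlush()
  }

  return { write, flush, close }
}

The invariant worth checking in review is that anything accepted by write() before close() is persisted by the time close() resolves, either by a flush that was already in flight or by the final drain.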