diff --git a/.claude/rules/emcn-components.md b/.claude/rules/emcn-components.md new file mode 100644 index 0000000000..011a3280f4 --- /dev/null +++ b/.claude/rules/emcn-components.md @@ -0,0 +1,35 @@ +--- +paths: + - "apps/sim/components/emcn/**" +--- + +# EMCN Components + +Import from `@/components/emcn`, never from subpaths (except CSS files). + +## CVA vs Direct Styles + +**Use CVA when:** 2+ variants (primary/secondary, sm/md/lg) + +```tsx +const buttonVariants = cva('base-classes', { + variants: { variant: { default: '...', primary: '...' } } +}) +export { Button, buttonVariants } +``` + +**Use direct className when:** Single consistent style, no variations + +```tsx +function Label({ className, ...props }) { + return +} +``` + +## Rules + +- Use Radix UI primitives for accessibility +- Export component and variants (if using CVA) +- TSDoc with usage examples +- Consistent tokens: `font-medium`, `text-[12px]`, `rounded-[4px]` +- `transition-colors` for hover states diff --git a/.claude/rules/global.md b/.claude/rules/global.md new file mode 100644 index 0000000000..e749b67b28 --- /dev/null +++ b/.claude/rules/global.md @@ -0,0 +1,13 @@ +# Global Standards + +## Logging +Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`. + +## Comments +Use TSDoc for documentation. No `====` separators. No non-TSDoc comments. + +## Styling +Never update global styles. Keep all styling local to components. + +## Package Manager +Use `bun` and `bunx`, not `npm` and `npx`. diff --git a/.claude/rules/sim-architecture.md b/.claude/rules/sim-architecture.md new file mode 100644 index 0000000000..d6d7197972 --- /dev/null +++ b/.claude/rules/sim-architecture.md @@ -0,0 +1,56 @@ +--- +paths: + - "apps/sim/**" +--- + +# Sim App Architecture + +## Core Principles +1. **Single Responsibility**: Each component, hook, store has one clear purpose +2. **Composition Over Complexity**: Break down complex logic into smaller pieces +3. **Type Safety First**: TypeScript interfaces for all props, state, return types +4. 
**Predictable State**: Zustand for global state, useState for UI-only concerns + +## Root-Level Structure + +``` +apps/sim/ +├── app/ # Next.js app router (pages, API routes) +├── blocks/ # Block definitions and registry +├── components/ # Shared UI (emcn/, ui/) +├── executor/ # Workflow execution engine +├── hooks/ # Shared hooks (queries/, selectors/) +├── lib/ # App-wide utilities +├── providers/ # LLM provider integrations +├── stores/ # Zustand stores +├── tools/ # Tool definitions +└── triggers/ # Trigger definitions +``` + +## Feature Organization + +Features live under `app/workspace/[workspaceId]/`: + +``` +feature/ +├── components/ # Feature components +├── hooks/ # Feature-scoped hooks +├── utils/ # Feature-scoped utilities (2+ consumers) +├── feature.tsx # Main component +└── page.tsx # Next.js page entry +``` + +## Naming Conventions +- **Components**: PascalCase (`WorkflowList`) +- **Hooks**: `use` prefix (`useWorkflowOperations`) +- **Files**: kebab-case (`workflow-list.tsx`) +- **Stores**: `stores/feature/store.ts` +- **Constants**: SCREAMING_SNAKE_CASE +- **Interfaces**: PascalCase with suffix (`WorkflowListProps`) + +## Utils Rules + +- **Never create `utils.ts` for single consumer** - inline it +- **Create `utils.ts` when** 2+ files need the same helper +- **Check existing sources** before duplicating (`lib/` has many utilities) +- **Location**: `lib/` (app-wide) → `feature/utils/` (feature-scoped) → inline (single-use) diff --git a/.claude/rules/sim-components.md b/.claude/rules/sim-components.md new file mode 100644 index 0000000000..23799bcda0 --- /dev/null +++ b/.claude/rules/sim-components.md @@ -0,0 +1,48 @@ +--- +paths: + - "apps/sim/**/*.tsx" +--- + +# Component Patterns + +## Structure Order + +```typescript +'use client' // Only if using hooks + +// Imports (external → internal) +// Constants at module level +const CONFIG = { SPACING: 8 } as const + +// Props interface +interface ComponentProps { + requiredProp: string + optionalProp?: boolean +} + +export function Component({ requiredProp, optionalProp = false }: ComponentProps) { + // a. Refs + // b. External hooks (useParams, useRouter) + // c. Store hooks + // d. Custom hooks + // e. Local state + // f. useMemo + // g. useCallback + // h. useEffect + // i. Return JSX +} +``` + +## Rules + +1. `'use client'` only when using React hooks +2. Always define props interface +3. Extract constants with `as const` +4. Semantic HTML (`aside`, `nav`, `article`) +5. Optional chain callbacks: `onAction?.(id)` + +## Component Extraction + +**Extract when:** 50+ lines, used in 2+ files, or has own state/logic + +**Keep inline when:** < 10 lines, single use, purely presentational diff --git a/.claude/rules/sim-hooks.md b/.claude/rules/sim-hooks.md new file mode 100644 index 0000000000..3c06a4a310 --- /dev/null +++ b/.claude/rules/sim-hooks.md @@ -0,0 +1,55 @@ +--- +paths: + - "apps/sim/**/use-*.ts" + - "apps/sim/**/hooks/**/*.ts" +--- + +# Hook Patterns + +## Structure + +```typescript +interface UseFeatureProps { + id: string + onSuccess?: (result: Result) => void +} + +export function useFeature({ id, onSuccess }: UseFeatureProps) { + // 1. Refs for stable dependencies + const idRef = useRef(id) + const onSuccessRef = useRef(onSuccess) + + // 2. State + const [data, setData] = useState(null) + const [isLoading, setIsLoading] = useState(false) + + // 3. Sync refs + useEffect(() => { + idRef.current = id + onSuccessRef.current = onSuccess + }, [id, onSuccess]) + + // 4. 
Operations (useCallback with empty deps when using refs) + const fetchData = useCallback(async () => { + setIsLoading(true) + try { + const result = await fetch(`/api/${idRef.current}`).then(r => r.json()) + setData(result) + onSuccessRef.current?.(result) + } finally { + setIsLoading(false) + } + }, []) + + return { data, isLoading, fetchData } +} +``` + +## Rules + +1. Single responsibility per hook +2. Props interface required +3. Refs for stable callback dependencies +4. Wrap returned functions in useCallback +5. Always try/catch async operations +6. Track loading/error states diff --git a/.claude/rules/sim-imports.md b/.claude/rules/sim-imports.md new file mode 100644 index 0000000000..b1f1926cd9 --- /dev/null +++ b/.claude/rules/sim-imports.md @@ -0,0 +1,62 @@ +--- +paths: + - "apps/sim/**/*.ts" + - "apps/sim/**/*.tsx" +--- + +# Import Patterns + +## Absolute Imports + +**Always use absolute imports.** Never use relative imports. + +```typescript +// ✓ Good +import { useWorkflowStore } from '@/stores/workflows/store' +import { Button } from '@/components/ui/button' + +// ✗ Bad +import { useWorkflowStore } from '../../../stores/workflows/store' +``` + +## Barrel Exports + +Use barrel exports (`index.ts`) when a folder has 3+ exports. Import from barrel, not individual files. + +```typescript +// ✓ Good +import { Dashboard, Sidebar } from '@/app/workspace/[workspaceId]/logs/components' + +// ✗ Bad +import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components/dashboard/dashboard' +``` + +## No Re-exports + +Do not re-export from non-barrel files. Import directly from the source. + +```typescript +// ✓ Good - import from where it's declared +import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' + +// ✗ Bad - re-exporting in utils.ts then importing from there +import { CORE_TRIGGER_TYPES } from '@/app/workspace/.../utils' +``` + +## Import Order + +1. React/core libraries +2. External libraries +3. UI components (`@/components/emcn`, `@/components/ui`) +4. Utilities (`@/lib/...`) +5. Stores (`@/stores/...`) +6. Feature imports +7. CSS imports + +## Type Imports + +Use `type` keyword for type-only imports: + +```typescript +import type { WorkflowLog } from '@/stores/logs/types' +``` diff --git a/.claude/rules/sim-integrations.md b/.claude/rules/sim-integrations.md new file mode 100644 index 0000000000..cef0c895bd --- /dev/null +++ b/.claude/rules/sim-integrations.md @@ -0,0 +1,209 @@ +--- +paths: + - "apps/sim/tools/**" + - "apps/sim/blocks/**" + - "apps/sim/triggers/**" +--- + +# Adding Integrations + +## Overview + +Adding a new integration typically requires: +1. **Tools** - API operations (`tools/{service}/`) +2. **Block** - UI component (`blocks/blocks/{service}.ts`) +3. **Icon** - SVG icon (`components/icons.tsx`) +4. **Trigger** (optional) - Webhooks/polling (`triggers/{service}/`) + +Always look up the service's API docs first. + +## 1. Tools (`tools/{service}/`) + +``` +tools/{service}/ +├── index.ts # Export all tools +├── types.ts # Params/response types +├── {action}.ts # Individual tool (e.g., send_message.ts) +└── ... 
+``` + +**Tool file structure:** + +```typescript +// tools/{service}/{action}.ts +import type { {Service}Params, {Service}Response } from '@/tools/{service}/types' +import type { ToolConfig } from '@/tools/types' + +export const {service}{Action}Tool: ToolConfig<{Service}Params, {Service}Response> = { + id: '{service}_{action}', + name: '{Service} {Action}', + description: 'What this tool does', + version: '1.0.0', + oauth: { required: true, provider: '{service}' }, // if OAuth + params: { /* param definitions */ }, + request: { + url: '/api/tools/{service}/{action}', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ ...params }), + }, + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) throw new Error(data.error) + return { success: true, output: data.output } + }, + outputs: { /* output definitions */ }, +} +``` + +**Register in `tools/registry.ts`:** + +```typescript +import { {service}{Action}Tool } from '@/tools/{service}' +// Add to registry object +{service}_{action}: {service}{Action}Tool, +``` + +## 2. Block (`blocks/blocks/{service}.ts`) + +```typescript +import { {Service}Icon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import type { {Service}Response } from '@/tools/{service}/types' + +export const {Service}Block: BlockConfig<{Service}Response> = { + type: '{service}', + name: '{Service}', + description: 'Short description', + longDescription: 'Detailed description', + category: 'tools', + bgColor: '#hexcolor', + icon: {Service}Icon, + subBlocks: [ /* see SubBlock Properties below */ ], + tools: { + access: ['{service}_{action}', ...], + config: { + tool: (params) => `{service}_${params.operation}`, + params: (params) => ({ ...params }), + }, + }, + inputs: { /* input definitions */ }, + outputs: { /* output definitions */ }, +} +``` + +### SubBlock Properties + +```typescript +{ + id: 'fieldName', // Unique identifier + title: 'Field Label', // UI label + type: 'short-input', // See SubBlock Types below + placeholder: 'Hint text', + required: true, // See Required below + condition: { ... }, // See Condition below + dependsOn: ['otherField'], // See DependsOn below + mode: 'basic', // 'basic' | 'advanced' | 'both' | 'trigger' +} +``` + +**SubBlock Types:** `short-input`, `long-input`, `dropdown`, `code`, `switch`, `slider`, `oauth-input`, `channel-selector`, `user-selector`, `file-upload`, etc. 
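+For example, a hypothetical pairing of an `operation` dropdown with a message field that only appears for the send operation might look like this (field names are illustrative; dropdown option wiring omitted):
+
+```typescript
+subBlocks: [
+  { id: 'operation', title: 'Operation', type: 'dropdown', required: true },
+  {
+    id: 'message',
+    title: 'Message',
+    type: 'long-input',
+    placeholder: 'Text to send',
+    condition: { field: 'operation', value: 'send' },
+    required: { field: 'operation', value: 'send' },
+    mode: 'both',
+  },
+],
+```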
+ +### `condition` - Show/hide based on another field + +```typescript +// Show when operation === 'send' +condition: { field: 'operation', value: 'send' } + +// Show when operation is 'send' OR 'read' +condition: { field: 'operation', value: ['send', 'read'] } + +// Show when operation !== 'send' +condition: { field: 'operation', value: 'send', not: true } + +// Complex: NOT in list AND another condition +condition: { + field: 'operation', + value: ['list_channels', 'list_users'], + not: true, + and: { field: 'destinationType', value: 'dm', not: true } +} +``` + +### `required` - Field validation + +```typescript +// Always required +required: true + +// Conditionally required (same syntax as condition) +required: { field: 'operation', value: 'send' } +``` + +### `dependsOn` - Clear field when dependencies change + +```typescript +// Clear when credential changes +dependsOn: ['credential'] + +// Clear when authMethod changes AND (credential OR botToken) changes +dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] } +``` + +### `mode` - When to show field + +- `'basic'` - Only in basic mode (default UI) +- `'advanced'` - Only in advanced mode (manual input) +- `'both'` - Show in both modes (default) +- `'trigger'` - Only when block is used as trigger + +**Register in `blocks/registry.ts`:** + +```typescript +import { {Service}Block } from '@/blocks/blocks/{service}' +// Add to registry object (alphabetically) +{service}: {Service}Block, +``` + +## 3. Icon (`components/icons.tsx`) + +```typescript +export function {Service}Icon(props: SVGProps) { + return ( + + {/* SVG path from service's brand assets */} + + ) +} +``` + +## 4. Trigger (`triggers/{service}/`) - Optional + +``` +triggers/{service}/ +├── index.ts # Export all triggers +├── webhook.ts # Webhook handler +├── utils.ts # Shared utilities +└── {event}.ts # Specific event handlers +``` + +**Register in `triggers/registry.ts`:** + +```typescript +import { {service}WebhookTrigger } from '@/triggers/{service}' +// Add to TRIGGER_REGISTRY +{service}_webhook: {service}WebhookTrigger, +``` + +## Checklist + +- [ ] Look up API docs for the service +- [ ] Create `tools/{service}/types.ts` with proper types +- [ ] Create tool files for each operation +- [ ] Create `tools/{service}/index.ts` barrel export +- [ ] Register tools in `tools/registry.ts` +- [ ] Add icon to `components/icons.tsx` +- [ ] Create block in `blocks/blocks/{service}.ts` +- [ ] Register block in `blocks/registry.ts` +- [ ] (Optional) Create triggers in `triggers/{service}/` +- [ ] (Optional) Register triggers in `triggers/registry.ts` diff --git a/.claude/rules/sim-queries.md b/.claude/rules/sim-queries.md new file mode 100644 index 0000000000..0ca91ac263 --- /dev/null +++ b/.claude/rules/sim-queries.md @@ -0,0 +1,66 @@ +--- +paths: + - "apps/sim/hooks/queries/**/*.ts" +--- + +# React Query Patterns + +All React Query hooks live in `hooks/queries/`. + +## Query Key Factory + +Every query file defines a keys factory: + +```typescript +export const entityKeys = { + all: ['entity'] as const, + list: (workspaceId?: string) => [...entityKeys.all, 'list', workspaceId ?? ''] as const, + detail: (id?: string) => [...entityKeys.all, 'detail', id ?? ''] as const, +} +``` + +## File Structure + +```typescript +// 1. Query keys factory +// 2. Types (if needed) +// 3. Private fetch functions +// 4. 
Exported hooks +``` + +## Query Hook + +```typescript +export function useEntityList(workspaceId?: string, options?: { enabled?: boolean }) { + return useQuery({ + queryKey: entityKeys.list(workspaceId), + queryFn: () => fetchEntities(workspaceId as string), + enabled: Boolean(workspaceId) && (options?.enabled ?? true), + staleTime: 60 * 1000, + placeholderData: keepPreviousData, + }) +} +``` + +## Mutation Hook + +```typescript +export function useCreateEntity() { + const queryClient = useQueryClient() + return useMutation({ + mutationFn: async (variables) => { /* fetch POST */ }, + onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }), + }) +} +``` + +## Optimistic Updates + +For optimistic mutations syncing with Zustand, use `createOptimisticMutationHandlers` from `@/hooks/queries/utils/optimistic-mutation`. + +## Naming + +- **Keys**: `entityKeys` +- **Query hooks**: `useEntity`, `useEntityList` +- **Mutation hooks**: `useCreateEntity`, `useUpdateEntity` +- **Fetch functions**: `fetchEntity` (private) diff --git a/.claude/rules/sim-stores.md b/.claude/rules/sim-stores.md new file mode 100644 index 0000000000..333ff9fd91 --- /dev/null +++ b/.claude/rules/sim-stores.md @@ -0,0 +1,71 @@ +--- +paths: + - "apps/sim/**/store.ts" + - "apps/sim/**/stores/**/*.ts" +--- + +# Zustand Store Patterns + +Stores live in `stores/`. Complex stores split into `store.ts` + `types.ts`. + +## Basic Store + +```typescript +import { create } from 'zustand' +import { devtools } from 'zustand/middleware' +import type { FeatureState } from '@/stores/feature/types' + +const initialState = { items: [] as Item[], activeId: null as string | null } + +export const useFeatureStore = create()( + devtools( + (set, get) => ({ + ...initialState, + setItems: (items) => set({ items }), + addItem: (item) => set((state) => ({ items: [...state.items, item] })), + reset: () => set(initialState), + }), + { name: 'feature-store' } + ) +) +``` + +## Persisted Store + +```typescript +import { create } from 'zustand' +import { persist } from 'zustand/middleware' + +export const useFeatureStore = create()( + persist( + (set) => ({ + width: 300, + setWidth: (width) => set({ width }), + _hasHydrated: false, + setHasHydrated: (v) => set({ _hasHydrated: v }), + }), + { + name: 'feature-state', + partialize: (state) => ({ width: state.width }), + onRehydrateStorage: () => (state) => state?.setHasHydrated(true), + } + ) +) +``` + +## Rules + +1. Use `devtools` middleware (named stores) +2. Use `persist` only when data should survive reload +3. `partialize` to persist only necessary state +4. `_hasHydrated` pattern for persisted stores needing hydration tracking +5. Immutable updates only +6. `set((state) => ...)` when depending on previous state +7. Provide `reset()` action + +## Outside React + +```typescript +const items = useFeatureStore.getState().items +useFeatureStore.setState({ items: newItems }) +``` diff --git a/.claude/rules/sim-styling.md b/.claude/rules/sim-styling.md new file mode 100644 index 0000000000..1b8c384a70 --- /dev/null +++ b/.claude/rules/sim-styling.md @@ -0,0 +1,41 @@ +--- +paths: + - "apps/sim/**/*.tsx" + - "apps/sim/**/*.css" +--- + +# Styling Rules + +## Tailwind + +1. **No inline styles** - Use Tailwind classes +2. **No duplicate dark classes** - Skip `dark:` when value matches light mode +3. **Exact values** - `text-[14px]`, `h-[26px]` +4. **Transitions** - `transition-colors` for interactive states + +## Conditional Classes + +```typescript +import { cn } from '@/lib/utils' + +
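+// Illustrative sketch (assumed component context): cn() merges base classes,
+// a conditional class, and an optional incoming className prop.
+<div className={cn('flex items-center gap-2', isActive && 'bg-muted', className)} />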
+``` + +## CSS Variables + +For dynamic values (widths, heights) synced with stores: + +```typescript +// In store +setWidth: (width) => { + set({ width }) + document.documentElement.style.setProperty('--sidebar-width', `${width}px`) +} + +// In component +
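+// Illustrative sketch (assumed element and class): consume the variable
+// with a Tailwind arbitrary value instead of an inline style.
+<aside className='w-[var(--sidebar-width)]' />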
+```
+        Loading
+        Validating your unsubscribe link...
} > diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx index 0bce7c5885..0a9125f92c 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx @@ -2,7 +2,6 @@ import { useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { Button, Label, @@ -14,7 +13,7 @@ import { Textarea, } from '@/components/emcn' import type { DocumentData } from '@/lib/knowledge/types' -import { knowledgeKeys } from '@/hooks/queries/knowledge' +import { useCreateChunk } from '@/hooks/queries/knowledge' const logger = createLogger('CreateChunkModal') @@ -31,16 +30,20 @@ export function CreateChunkModal({ document, knowledgeBaseId, }: CreateChunkModalProps) { - const queryClient = useQueryClient() + const { + mutate: createChunk, + isPending: isCreating, + error: mutationError, + reset: resetMutation, + } = useCreateChunk() const [content, setContent] = useState('') - const [isCreating, setIsCreating] = useState(false) - const [error, setError] = useState(null) const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false) const isProcessingRef = useRef(false) + const error = mutationError?.message ?? null const hasUnsavedChanges = content.trim().length > 0 - const handleCreateChunk = async () => { + const handleCreateChunk = () => { if (!document || content.trim().length === 0 || isProcessingRef.current) { if (isProcessingRef.current) { logger.warn('Chunk creation already in progress, ignoring duplicate request') @@ -48,57 +51,32 @@ export function CreateChunkModal({ return } - try { - isProcessingRef.current = true - setIsCreating(true) - setError(null) - - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks`, - { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - content: content.trim(), - enabled: true, - }), - } - ) - - if (!response.ok) { - const result = await response.json() - throw new Error(result.error || 'Failed to create chunk') + isProcessingRef.current = true + + createChunk( + { + knowledgeBaseId, + documentId: document.id, + content: content.trim(), + enabled: true, + }, + { + onSuccess: () => { + isProcessingRef.current = false + onClose() + }, + onError: () => { + isProcessingRef.current = false + }, } - - const result = await response.json() - - if (result.success && result.data) { - logger.info('Chunk created successfully:', result.data.id) - - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(knowledgeBaseId), - }) - - onClose() - } else { - throw new Error(result.error || 'Failed to create chunk') - } - } catch (err) { - logger.error('Error creating chunk:', err) - setError(err instanceof Error ? 
err.message : 'An error occurred') - } finally { - isProcessingRef.current = false - setIsCreating(false) - } + ) } const onClose = () => { onOpenChange(false) setContent('') - setError(null) setShowUnsavedChangesAlert(false) + resetMutation() } const handleCloseAttempt = () => { diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx index ff841ddec9..fcebce6b8b 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx @@ -1,13 +1,8 @@ 'use client' -import { useState } from 'react' -import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn' import type { ChunkData } from '@/lib/knowledge/types' -import { knowledgeKeys } from '@/hooks/queries/knowledge' - -const logger = createLogger('DeleteChunkModal') +import { useDeleteChunk } from '@/hooks/queries/knowledge' interface DeleteChunkModalProps { chunk: ChunkData | null @@ -24,44 +19,12 @@ export function DeleteChunkModal({ isOpen, onClose, }: DeleteChunkModalProps) { - const queryClient = useQueryClient() - const [isDeleting, setIsDeleting] = useState(false) + const { mutate: deleteChunk, isPending: isDeleting } = useDeleteChunk() - const handleDeleteChunk = async () => { + const handleDeleteChunk = () => { if (!chunk || isDeleting) return - try { - setIsDeleting(true) - - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunk.id}`, - { - method: 'DELETE', - } - ) - - if (!response.ok) { - throw new Error('Failed to delete chunk') - } - - const result = await response.json() - - if (result.success) { - logger.info('Chunk deleted successfully:', chunk.id) - - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(knowledgeBaseId), - }) - - onClose() - } else { - throw new Error(result.error || 'Failed to delete chunk') - } - } catch (err) { - logger.error('Error deleting chunk:', err) - } finally { - setIsDeleting(false) - } + deleteChunk({ knowledgeBaseId, documentId, chunkId: chunk.id }, { onSuccess: onClose }) } if (!chunk) return null diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/document-tags-modal/document-tags-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/document-tags-modal/document-tags-modal.tsx index d4397ba700..13c01e2233 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/document-tags-modal/document-tags-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/document-tags-modal/document-tags-modal.tsx @@ -25,6 +25,7 @@ import { } from '@/hooks/kb/use-knowledge-base-tag-definitions' import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot' import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions' +import { useUpdateDocumentTags } from '@/hooks/queries/knowledge' const logger = createLogger('DocumentTagsModal') @@ -58,8 +59,6 @@ function formatValueForDisplay(value: string, fieldType: string): string { try { const date = new 
Date(value) if (Number.isNaN(date.getTime())) return value - // For UTC dates, display the UTC date to prevent timezone shifts - // e.g., 2002-05-16T00:00:00.000Z should show as "May 16, 2002" not "May 15, 2002" if (typeof value === 'string' && (value.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(value))) { return new Date( date.getUTCFullYear(), @@ -96,6 +95,7 @@ export function DocumentTagsModal({ const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId) const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId) const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId) + const { mutateAsync: updateDocumentTags } = useUpdateDocumentTags() const { saveTagDefinitions, tagDefinitions, fetchTagDefinitions } = documentTagHook const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = kbTagHook @@ -118,7 +118,6 @@ export function DocumentTagsModal({ const definition = definitions.find((def) => def.tagSlot === slot) if (rawValue !== null && rawValue !== undefined && definition) { - // Convert value to string for storage const stringValue = String(rawValue).trim() if (stringValue) { tags.push({ @@ -142,41 +141,34 @@ export function DocumentTagsModal({ async (tagsToSave: DocumentTag[]) => { if (!documentData) return - try { - const tagData: Record = {} - - // Only include tags that have values (omit empty ones) - // Use empty string for slots that should be cleared - ALL_TAG_SLOTS.forEach((slot) => { - const tag = tagsToSave.find((t) => t.slot === slot) - if (tag?.value.trim()) { - tagData[slot] = tag.value.trim() - } else { - // Use empty string to clear a tag (API schema expects string, not null) - tagData[slot] = '' - } - }) - - const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(tagData), - }) - - if (!response.ok) { - throw new Error('Failed to update document tags') + const tagData: Record = {} + + ALL_TAG_SLOTS.forEach((slot) => { + const tag = tagsToSave.find((t) => t.slot === slot) + if (tag?.value.trim()) { + tagData[slot] = tag.value.trim() + } else { + tagData[slot] = '' } + }) - onDocumentUpdate?.(tagData as Record) - await fetchTagDefinitions() - } catch (error) { - logger.error('Error updating document tags:', error) - throw error - } + await updateDocumentTags({ + knowledgeBaseId, + documentId, + tags: tagData, + }) + + onDocumentUpdate?.(tagData) + await fetchTagDefinitions() }, - [documentData, knowledgeBaseId, documentId, fetchTagDefinitions, onDocumentUpdate] + [ + documentData, + knowledgeBaseId, + documentId, + updateDocumentTags, + fetchTagDefinitions, + onDocumentUpdate, + ] ) const handleRemoveTag = async (index: number) => { diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx index 60aa328f31..9148ca5c76 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx @@ -2,7 +2,6 @@ import { useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { ChevronDown, ChevronUp } from 'lucide-react' 
import { Button, @@ -19,7 +18,7 @@ import { import type { ChunkData, DocumentData } from '@/lib/knowledge/types' import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' -import { knowledgeKeys } from '@/hooks/queries/knowledge' +import { useUpdateChunk } from '@/hooks/queries/knowledge' const logger = createLogger('EditChunkModal') @@ -50,17 +49,22 @@ export function EditChunkModal({ onNavigateToPage, maxChunkSize, }: EditChunkModalProps) { - const queryClient = useQueryClient() const userPermissions = useUserPermissionsContext() + const { + mutate: updateChunk, + isPending: isSaving, + error: mutationError, + reset: resetMutation, + } = useUpdateChunk() const [editedContent, setEditedContent] = useState(chunk?.content || '') - const [isSaving, setIsSaving] = useState(false) const [isNavigating, setIsNavigating] = useState(false) - const [error, setError] = useState(null) const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false) const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null) const [tokenizerOn, setTokenizerOn] = useState(false) const textareaRef = useRef(null) + const error = mutationError?.message ?? null + const hasUnsavedChanges = editedContent !== (chunk?.content || '') const tokenStrings = useMemo(() => { @@ -102,44 +106,15 @@ export function EditChunkModal({ const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1 const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages - const handleSaveContent = async () => { + const handleSaveContent = () => { if (!chunk || !document) return - try { - setIsSaving(true) - setError(null) - - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks/${chunk.id}`, - { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - content: editedContent, - }), - } - ) - - if (!response.ok) { - const result = await response.json() - throw new Error(result.error || 'Failed to update chunk') - } - - const result = await response.json() - - if (result.success) { - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(knowledgeBaseId), - }) - } - } catch (err) { - logger.error('Error updating chunk:', err) - setError(err instanceof Error ? err.message : 'An error occurred') - } finally { - setIsSaving(false) - } + updateChunk({ + knowledgeBaseId, + documentId: document.id, + chunkId: chunk.id, + content: editedContent, + }) } const navigateToChunk = async (direction: 'prev' | 'next') => { @@ -165,7 +140,6 @@ export function EditChunkModal({ } } catch (err) { logger.error(`Error navigating ${direction}:`, err) - setError(`Failed to navigate to ${direction === 'prev' ? 
'previous' : 'next'} chunk`) } finally { setIsNavigating(false) } @@ -185,6 +159,7 @@ export function EditChunkModal({ setPendingNavigation(null) setShowUnsavedChangesAlert(true) } else { + resetMutation() onClose() } } @@ -195,6 +170,7 @@ export function EditChunkModal({ void pendingNavigation() setPendingNavigation(null) } else { + resetMutation() onClose() } } diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx index 7c724a1779..f32b37fd0c 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx @@ -48,7 +48,13 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/componen import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks' import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/kb/use-knowledge' -import { knowledgeKeys, useDocumentChunkSearchQuery } from '@/hooks/queries/knowledge' +import { + knowledgeKeys, + useBulkChunkOperation, + useDeleteDocument, + useDocumentChunkSearchQuery, + useUpdateChunk, +} from '@/hooks/queries/knowledge' const logger = createLogger('Document') @@ -403,11 +409,13 @@ export function Document({ const [isCreateChunkModalOpen, setIsCreateChunkModalOpen] = useState(false) const [chunkToDelete, setChunkToDelete] = useState(null) const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false) - const [isBulkOperating, setIsBulkOperating] = useState(false) const [showDeleteDocumentDialog, setShowDeleteDocumentDialog] = useState(false) - const [isDeletingDocument, setIsDeletingDocument] = useState(false) const [contextMenuChunk, setContextMenuChunk] = useState(null) + const { mutate: updateChunkMutation } = useUpdateChunk() + const { mutate: deleteDocumentMutation, isPending: isDeletingDocument } = useDeleteDocument() + const { mutate: bulkChunkMutation, isPending: isBulkOperating } = useBulkChunkOperation() + const { isOpen: isContextMenuOpen, position: contextMenuPosition, @@ -440,36 +448,23 @@ export function Document({ setSelectedChunk(null) } - const handleToggleEnabled = async (chunkId: string) => { + const handleToggleEnabled = (chunkId: string) => { const chunk = displayChunks.find((c) => c.id === chunkId) if (!chunk) return - try { - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`, - { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - enabled: !chunk.enabled, - }), - } - ) - - if (!response.ok) { - throw new Error('Failed to update chunk') - } - - const result = await response.json() - - if (result.success) { - updateChunk(chunkId, { enabled: !chunk.enabled }) + updateChunkMutation( + { + knowledgeBaseId, + documentId, + chunkId, + enabled: !chunk.enabled, + }, + { + onSuccess: () => { + updateChunk(chunkId, { enabled: !chunk.enabled }) + }, } - } catch (err) { - logger.error('Error updating chunk:', err) - } + ) } const handleDeleteChunk = (chunkId: string) => { @@ -515,107 +510,65 @@ export function Document({ /** * Handles deleting the document */ - const handleDeleteDocument = async () => { + const handleDeleteDocument = () => { if (!documentData) return - try { - setIsDeletingDocument(true) - - const response = await 
fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, { - method: 'DELETE', - }) - - if (!response.ok) { - throw new Error('Failed to delete document') - } - - const result = await response.json() - - if (result.success) { - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(knowledgeBaseId), - }) - - router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`) - } else { - throw new Error(result.error || 'Failed to delete document') + deleteDocumentMutation( + { knowledgeBaseId, documentId }, + { + onSuccess: () => { + router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`) + }, } - } catch (err) { - logger.error('Error deleting document:', err) - setIsDeletingDocument(false) - } + ) } - const performBulkChunkOperation = async ( + const performBulkChunkOperation = ( operation: 'enable' | 'disable' | 'delete', chunks: ChunkData[] ) => { if (chunks.length === 0) return - try { - setIsBulkOperating(true) - - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, - { - method: 'PATCH', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - operation, - chunkIds: chunks.map((chunk) => chunk.id), - }), - } - ) - - if (!response.ok) { - throw new Error(`Failed to ${operation} chunks`) - } - - const result = await response.json() - - if (result.success) { - if (operation === 'delete') { - await refreshChunks() - } else { - result.data.results.forEach((opResult: any) => { - if (opResult.operation === operation) { - opResult.chunkIds.forEach((chunkId: string) => { - updateChunk(chunkId, { enabled: operation === 'enable' }) - }) - } - }) - } - - logger.info(`Successfully ${operation}d ${result.data.successCount} chunks`) + bulkChunkMutation( + { + knowledgeBaseId, + documentId, + operation, + chunkIds: chunks.map((chunk) => chunk.id), + }, + { + onSuccess: (result) => { + if (operation === 'delete' || result.errorCount > 0) { + refreshChunks() + } else { + chunks.forEach((chunk) => { + updateChunk(chunk.id, { enabled: operation === 'enable' }) + }) + } + logger.info(`Successfully ${operation}d ${result.successCount} chunks`) + setSelectedChunks(new Set()) + }, } - - setSelectedChunks(new Set()) - } catch (err) { - logger.error(`Error ${operation}ing chunks:`, err) - } finally { - setIsBulkOperating(false) - } + ) } - const handleBulkEnable = async () => { + const handleBulkEnable = () => { const chunksToEnable = displayChunks.filter( (chunk) => selectedChunks.has(chunk.id) && !chunk.enabled ) - await performBulkChunkOperation('enable', chunksToEnable) + performBulkChunkOperation('enable', chunksToEnable) } - const handleBulkDisable = async () => { + const handleBulkDisable = () => { const chunksToDisable = displayChunks.filter( (chunk) => selectedChunks.has(chunk.id) && chunk.enabled ) - await performBulkChunkOperation('disable', chunksToDisable) + performBulkChunkOperation('disable', chunksToDisable) } - const handleBulkDelete = async () => { + const handleBulkDelete = () => { const chunksToDelete = displayChunks.filter((chunk) => selectedChunks.has(chunk.id)) - await performBulkChunkOperation('delete', chunksToDelete) + performBulkChunkOperation('delete', chunksToDelete) } const selectedChunksList = displayChunks.filter((chunk) => selectedChunks.has(chunk.id)) diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx index da1f19e54e..81d30f53d9 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx @@ -2,7 +2,6 @@ import { useCallback, useEffect, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { format } from 'date-fns' import { AlertCircle, @@ -62,7 +61,12 @@ import { type TagDefinition, useKnowledgeBaseTagDefinitions, } from '@/hooks/kb/use-knowledge-base-tag-definitions' -import { knowledgeKeys } from '@/hooks/queries/knowledge' +import { + useBulkDocumentOperation, + useDeleteDocument, + useDeleteKnowledgeBase, + useUpdateDocument, +} from '@/hooks/queries/knowledge' const logger = createLogger('KnowledgeBase') @@ -407,12 +411,17 @@ export function KnowledgeBase({ id, knowledgeBaseName: passedKnowledgeBaseName, }: KnowledgeBaseProps) { - const queryClient = useQueryClient() const params = useParams() const workspaceId = params.workspaceId as string const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false }) const userPermissions = useUserPermissionsContext() + const { mutate: updateDocumentMutation } = useUpdateDocument() + const { mutate: deleteDocumentMutation } = useDeleteDocument() + const { mutate: deleteKnowledgeBaseMutation, isPending: isDeleting } = + useDeleteKnowledgeBase(workspaceId) + const { mutate: bulkDocumentMutation, isPending: isBulkOperating } = useBulkDocumentOperation() + const [searchQuery, setSearchQuery] = useState('') const [showTagsModal, setShowTagsModal] = useState(false) @@ -427,8 +436,6 @@ export function KnowledgeBase({ const [selectedDocuments, setSelectedDocuments] = useState>(new Set()) const [showDeleteDialog, setShowDeleteDialog] = useState(false) const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false) - const [isDeleting, setIsDeleting] = useState(false) - const [isBulkOperating, setIsBulkOperating] = useState(false) const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false) const [documentToDelete, setDocumentToDelete] = useState(null) const [showBulkDeleteModal, setShowBulkDeleteModal] = useState(false) @@ -550,7 +557,7 @@ export function KnowledgeBase({ /** * Checks for documents with stale processing states and marks them as failed */ - const checkForDeadProcesses = async () => { + const checkForDeadProcesses = () => { const now = new Date() const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes @@ -567,116 +574,79 @@ export function KnowledgeBase({ logger.warn(`Found ${staleDocuments.length} documents with dead processes`) - const markFailedPromises = staleDocuments.map(async (doc) => { - try { - const response = await fetch(`/api/knowledge/${id}/documents/${doc.id}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', + staleDocuments.forEach((doc) => { + updateDocumentMutation( + { + knowledgeBaseId: id, + documentId: doc.id, + updates: { markFailedDueToTimeout: true }, + }, + { + onSuccess: () => { + logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`) }, - body: JSON.stringify({ - markFailedDueToTimeout: true, - }), - }) - - if (!response.ok) { - const errorData = await response.json().catch(() => ({ error: 'Unknown error' })) - logger.error(`Failed to mark document ${doc.id} as failed: ${errorData.error}`) - return - } - - const result = await response.json() - if (result.success) { - logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`) } - } catch (error) { - 
logger.error(`Error marking document ${doc.id} as failed:`, error) - } + ) }) - - await Promise.allSettled(markFailedPromises) } - const handleToggleEnabled = async (docId: string) => { + const handleToggleEnabled = (docId: string) => { const document = documents.find((doc) => doc.id === docId) if (!document) return const newEnabled = !document.enabled + // Optimistic update updateDocument(docId, { enabled: newEnabled }) - try { - const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', + updateDocumentMutation( + { + knowledgeBaseId: id, + documentId: docId, + updates: { enabled: newEnabled }, + }, + { + onError: () => { + // Rollback on error + updateDocument(docId, { enabled: !newEnabled }) }, - body: JSON.stringify({ - enabled: newEnabled, - }), - }) - - if (!response.ok) { - throw new Error('Failed to update document') - } - - const result = await response.json() - - if (!result.success) { - updateDocument(docId, { enabled: !newEnabled }) } - } catch (err) { - updateDocument(docId, { enabled: !newEnabled }) - logger.error('Error updating document:', err) - } + ) } /** * Handles retrying a failed document processing */ - const handleRetryDocument = async (docId: string) => { - try { - updateDocument(docId, { - processingStatus: 'pending', - processingError: null, - processingStartedAt: null, - processingCompletedAt: null, - }) - - const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - retryProcessing: true, - }), - }) - - if (!response.ok) { - throw new Error('Failed to retry document processing') - } - - const result = await response.json() - - if (!result.success) { - throw new Error(result.error || 'Failed to retry document processing') - } + const handleRetryDocument = (docId: string) => { + // Optimistic update + updateDocument(docId, { + processingStatus: 'pending', + processingError: null, + processingStartedAt: null, + processingCompletedAt: null, + }) - await refreshDocuments() - - logger.info(`Document retry initiated successfully for: ${docId}`) - } catch (err) { - logger.error('Error retrying document:', err) - const currentDoc = documents.find((doc) => doc.id === docId) - if (currentDoc) { - updateDocument(docId, { - processingStatus: 'failed', - processingError: - err instanceof Error ? err.message : 'Failed to retry document processing', - }) + updateDocumentMutation( + { + knowledgeBaseId: id, + documentId: docId, + updates: { retryProcessing: true }, + }, + { + onSuccess: () => { + refreshDocuments() + logger.info(`Document retry initiated successfully for: ${docId}`) + }, + onError: (err) => { + logger.error('Error retrying document:', err) + updateDocument(docId, { + processingStatus: 'failed', + processingError: + err instanceof Error ? err.message : 'Failed to retry document processing', + }) + }, } - } + ) } /** @@ -694,43 +664,32 @@ export function KnowledgeBase({ const currentDoc = documents.find((doc) => doc.id === documentId) const previousName = currentDoc?.filename + // Optimistic update updateDocument(documentId, { filename: newName }) - queryClient.setQueryData(knowledgeKeys.document(id, documentId), (previous) => - previous ? 
{ ...previous, filename: newName } : previous - ) - try { - const response = await fetch(`/api/knowledge/${id}/documents/${documentId}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', + return new Promise((resolve, reject) => { + updateDocumentMutation( + { + knowledgeBaseId: id, + documentId, + updates: { filename: newName }, }, - body: JSON.stringify({ filename: newName }), - }) - - if (!response.ok) { - const result = await response.json() - throw new Error(result.error || 'Failed to rename document') - } - - const result = await response.json() - - if (!result.success) { - throw new Error(result.error || 'Failed to rename document') - } - - logger.info(`Document renamed: ${documentId}`) - } catch (err) { - if (previousName !== undefined) { - updateDocument(documentId, { filename: previousName }) - queryClient.setQueryData( - knowledgeKeys.document(id, documentId), - (previous) => (previous ? { ...previous, filename: previousName } : previous) - ) - } - logger.error('Error renaming document:', err) - throw err - } + { + onSuccess: () => { + logger.info(`Document renamed: ${documentId}`) + resolve() + }, + onError: (err) => { + // Rollback on error + if (previousName !== undefined) { + updateDocument(documentId, { filename: previousName }) + } + logger.error('Error renaming document:', err) + reject(err) + }, + } + ) + }) } /** @@ -744,35 +703,26 @@ export function KnowledgeBase({ /** * Confirms and executes the deletion of a single document */ - const confirmDeleteDocument = async () => { + const confirmDeleteDocument = () => { if (!documentToDelete) return - try { - const response = await fetch(`/api/knowledge/${id}/documents/${documentToDelete}`, { - method: 'DELETE', - }) - - if (!response.ok) { - throw new Error('Failed to delete document') - } - - const result = await response.json() - - if (result.success) { - refreshDocuments() - - setSelectedDocuments((prev) => { - const newSet = new Set(prev) - newSet.delete(documentToDelete) - return newSet - }) + deleteDocumentMutation( + { knowledgeBaseId: id, documentId: documentToDelete }, + { + onSuccess: () => { + refreshDocuments() + setSelectedDocuments((prev) => { + const newSet = new Set(prev) + newSet.delete(documentToDelete) + return newSet + }) + }, + onSettled: () => { + setShowDeleteDocumentModal(false) + setDocumentToDelete(null) + }, } - } catch (err) { - logger.error('Error deleting document:', err) - } finally { - setShowDeleteDocumentModal(false) - setDocumentToDelete(null) - } + ) } /** @@ -818,32 +768,18 @@ export function KnowledgeBase({ /** * Handles deleting the entire knowledge base */ - const handleDeleteKnowledgeBase = async () => { + const handleDeleteKnowledgeBase = () => { if (!knowledgeBase) return - try { - setIsDeleting(true) - - const response = await fetch(`/api/knowledge/${id}`, { - method: 'DELETE', - }) - - if (!response.ok) { - throw new Error('Failed to delete knowledge base') - } - - const result = await response.json() - - if (result.success) { - removeKnowledgeBase(id) - router.push(`/workspace/${workspaceId}/knowledge`) - } else { - throw new Error(result.error || 'Failed to delete knowledge base') + deleteKnowledgeBaseMutation( + { knowledgeBaseId: id }, + { + onSuccess: () => { + removeKnowledgeBase(id) + router.push(`/workspace/${workspaceId}/knowledge`) + }, } - } catch (err) { - logger.error('Error deleting knowledge base:', err) - setIsDeleting(false) - } + ) } /** @@ -856,93 +792,57 @@ export function KnowledgeBase({ /** * Handles bulk enabling of selected documents */ 
- const handleBulkEnable = async () => { + const handleBulkEnable = () => { const documentsToEnable = documents.filter( (doc) => selectedDocuments.has(doc.id) && !doc.enabled ) if (documentsToEnable.length === 0) return - try { - setIsBulkOperating(true) - - const response = await fetch(`/api/knowledge/${id}/documents`, { - method: 'PATCH', - headers: { - 'Content-Type': 'application/json', + bulkDocumentMutation( + { + knowledgeBaseId: id, + operation: 'enable', + documentIds: documentsToEnable.map((doc) => doc.id), + }, + { + onSuccess: (result) => { + result.updatedDocuments?.forEach((updatedDoc) => { + updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled }) + }) + logger.info(`Successfully enabled ${result.successCount} documents`) + setSelectedDocuments(new Set()) }, - body: JSON.stringify({ - operation: 'enable', - documentIds: documentsToEnable.map((doc) => doc.id), - }), - }) - - if (!response.ok) { - throw new Error('Failed to enable documents') } - - const result = await response.json() - - if (result.success) { - result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => { - updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled }) - }) - - logger.info(`Successfully enabled ${result.data.successCount} documents`) - } - - setSelectedDocuments(new Set()) - } catch (err) { - logger.error('Error enabling documents:', err) - } finally { - setIsBulkOperating(false) - } + ) } /** * Handles bulk disabling of selected documents */ - const handleBulkDisable = async () => { + const handleBulkDisable = () => { const documentsToDisable = documents.filter( (doc) => selectedDocuments.has(doc.id) && doc.enabled ) if (documentsToDisable.length === 0) return - try { - setIsBulkOperating(true) - - const response = await fetch(`/api/knowledge/${id}/documents`, { - method: 'PATCH', - headers: { - 'Content-Type': 'application/json', + bulkDocumentMutation( + { + knowledgeBaseId: id, + operation: 'disable', + documentIds: documentsToDisable.map((doc) => doc.id), + }, + { + onSuccess: (result) => { + result.updatedDocuments?.forEach((updatedDoc) => { + updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled }) + }) + logger.info(`Successfully disabled ${result.successCount} documents`) + setSelectedDocuments(new Set()) }, - body: JSON.stringify({ - operation: 'disable', - documentIds: documentsToDisable.map((doc) => doc.id), - }), - }) - - if (!response.ok) { - throw new Error('Failed to disable documents') } - - const result = await response.json() - - if (result.success) { - result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => { - updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled }) - }) - - logger.info(`Successfully disabled ${result.data.successCount} documents`) - } - - setSelectedDocuments(new Set()) - } catch (err) { - logger.error('Error disabling documents:', err) - } finally { - setIsBulkOperating(false) - } + ) } /** @@ -956,44 +856,28 @@ export function KnowledgeBase({ /** * Confirms and executes the bulk deletion of selected documents */ - const confirmBulkDelete = async () => { + const confirmBulkDelete = () => { const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id)) if (documentsToDelete.length === 0) return - try { - setIsBulkOperating(true) - - const response = await fetch(`/api/knowledge/${id}/documents`, { - method: 'PATCH', - headers: { - 'Content-Type': 'application/json', + bulkDocumentMutation( + { + knowledgeBaseId: id, + operation: 'delete', + documentIds: 
documentsToDelete.map((doc) => doc.id), + }, + { + onSuccess: (result) => { + logger.info(`Successfully deleted ${result.successCount} documents`) + refreshDocuments() + setSelectedDocuments(new Set()) + }, + onSettled: () => { + setShowBulkDeleteModal(false) }, - body: JSON.stringify({ - operation: 'delete', - documentIds: documentsToDelete.map((doc) => doc.id), - }), - }) - - if (!response.ok) { - throw new Error('Failed to delete documents') - } - - const result = await response.json() - - if (result.success) { - logger.info(`Successfully deleted ${result.data.successCount} documents`) } - - await refreshDocuments() - - setSelectedDocuments(new Set()) - } catch (err) { - logger.error('Error deleting documents:', err) - } finally { - setIsBulkOperating(false) - setShowBulkDeleteModal(false) - } + ) } const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id)) diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/base-tags-modal/base-tags-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/base-tags-modal/base-tags-modal.tsx index 5e6cb16981..282a85622b 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/base-tags-modal/base-tags-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/base-tags-modal/base-tags-modal.tsx @@ -22,10 +22,10 @@ import { type TagDefinition, useKnowledgeBaseTagDefinitions, } from '@/hooks/kb/use-knowledge-base-tag-definitions' +import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/knowledge' const logger = createLogger('BaseTagsModal') -/** Field type display labels */ const FIELD_TYPE_LABELS: Record = { text: 'Text', number: 'Number', @@ -45,7 +45,6 @@ interface DocumentListProps { totalCount: number } -/** Displays a list of documents affected by tag operations */ function DocumentList({ documents, totalCount }: DocumentListProps) { const displayLimit = 5 const hasMore = totalCount > displayLimit @@ -95,13 +94,14 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = useKnowledgeBaseTagDefinitions(knowledgeBaseId) + const createTagMutation = useCreateTagDefinition() + const deleteTagMutation = useDeleteTagDefinition() + const [deleteTagDialogOpen, setDeleteTagDialogOpen] = useState(false) const [selectedTag, setSelectedTag] = useState(null) const [viewDocumentsDialogOpen, setViewDocumentsDialogOpen] = useState(false) - const [isDeletingTag, setIsDeletingTag] = useState(false) const [tagUsageData, setTagUsageData] = useState([]) const [isCreatingTag, setIsCreatingTag] = useState(false) - const [isSavingTag, setIsSavingTag] = useState(false) const [createTagForm, setCreateTagForm] = useState({ displayName: '', fieldType: 'text', @@ -177,13 +177,12 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM } const tagNameConflict = - isCreatingTag && !isSavingTag && hasTagNameConflict(createTagForm.displayName) + isCreatingTag && !createTagMutation.isPending && hasTagNameConflict(createTagForm.displayName) const canSaveTag = () => { return createTagForm.displayName.trim() && !hasTagNameConflict(createTagForm.displayName) } - /** Get slot usage counts per field type */ const getSlotUsageByFieldType = (fieldType: string): { used: number; max: number } => { const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG] if (!config) return { used: 0, max: 0 } @@ -191,13 
+190,11 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM return { used, max: config.maxSlots } } - /** Check if a field type has available slots */ const hasAvailableSlots = (fieldType: string): boolean => { const { used, max } = getSlotUsageByFieldType(fieldType) return used < max } - /** Field type options for Combobox */ const fieldTypeOptions: ComboboxOption[] = useMemo(() => { return SUPPORTED_FIELD_TYPES.filter((type) => hasAvailableSlots(type)).map((type) => { const { used, max } = getSlotUsageByFieldType(type) @@ -211,43 +208,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM const saveTagDefinition = async () => { if (!canSaveTag()) return - setIsSavingTag(true) try { - // Check if selected field type has available slots if (!hasAvailableSlots(createTagForm.fieldType)) { throw new Error(`No available slots for ${createTagForm.fieldType} type`) } - // Get the next available slot from the API - const slotResponse = await fetch( - `/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${createTagForm.fieldType}` - ) - if (!slotResponse.ok) { - throw new Error('Failed to get available slot') - } - const slotResult = await slotResponse.json() - if (!slotResult.success || !slotResult.data?.nextAvailableSlot) { - throw new Error('No available tag slots for this field type') - } - - const newTagDefinition = { - tagSlot: slotResult.data.nextAvailableSlot, + await createTagMutation.mutateAsync({ + knowledgeBaseId, displayName: createTagForm.displayName.trim(), fieldType: createTagForm.fieldType, - } - - const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(newTagDefinition), }) - if (!response.ok) { - throw new Error('Failed to create tag definition') - } - await Promise.all([refreshTagDefinitions(), fetchTagUsage()]) setCreateTagForm({ @@ -257,27 +228,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM setIsCreatingTag(false) } catch (error) { logger.error('Error creating tag definition:', error) - } finally { - setIsSavingTag(false) } } const confirmDeleteTag = async () => { if (!selectedTag) return - setIsDeletingTag(true) try { - const response = await fetch( - `/api/knowledge/${knowledgeBaseId}/tag-definitions/${selectedTag.id}`, - { - method: 'DELETE', - } - ) - - if (!response.ok) { - const errorText = await response.text() - throw new Error(`Failed to delete tag definition: ${response.status} ${errorText}`) - } + await deleteTagMutation.mutateAsync({ + knowledgeBaseId, + tagDefinitionId: selectedTag.id, + }) await Promise.all([refreshTagDefinitions(), fetchTagUsage()]) @@ -285,8 +246,6 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM setSelectedTag(null) } catch (error) { logger.error('Error deleting tag definition:', error) - } finally { - setIsDeletingTag(false) } } @@ -433,11 +392,11 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM className='flex-1' disabled={ !canSaveTag() || - isSavingTag || + createTagMutation.isPending || !hasAvailableSlots(createTagForm.fieldType) } > - {isSavingTag ? 'Creating...' : 'Create Tag'} + {createTagMutation.isPending ? 'Creating...' 
: 'Create Tag'} @@ -481,13 +440,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM - @@ -499,7 +462,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM Documents using "{selectedTag?.displayName}"
-

+

{selectedTagUsage?.documentCount || 0} document {selectedTagUsage?.documentCount !== 1 ? 's are' : ' is'} currently using this tag definition. @@ -507,7 +470,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM {selectedTagUsage?.documentCount === 0 ? (

-

+

This tag definition is not being used by any documents. You can safely delete it to free up the tag slot.

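The base-tags-modal changes above replace inline fetch logic with `useCreateTagDefinition` and `useDeleteTagDefinition` from `@/hooks/queries/knowledge`, but the hook implementations are not part of this diff. Below is a minimal sketch of the assumed shape, reconstructed from the call sites (`mutateAsync` arguments, `isPending`) and from the endpoints the removed inline code called; the real hooks may differ, especially in how they key and invalidate the TanStack Query cache.

```ts
// Sketch only: the actual hook lives in @/hooks/queries/knowledge and is not
// shown in this diff. Endpoint paths and the slot lookup mirror the inline
// logic removed from base-tags-modal.tsx; the query key used for invalidation
// is a placeholder assumption.
import { useMutation, useQueryClient } from '@tanstack/react-query'

interface CreateTagDefinitionInput {
  knowledgeBaseId: string
  displayName: string
  fieldType: string
}

export function useCreateTagDefinition() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: async ({ knowledgeBaseId, displayName, fieldType }: CreateTagDefinitionInput) => {
      // Resolve the next free slot for this field type, as the old call site did.
      const slotRes = await fetch(
        `/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${fieldType}`
      )
      if (!slotRes.ok) throw new Error('Failed to get available slot')
      const slotResult = await slotRes.json()

      // Create the tag definition with the resolved slot.
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          tagSlot: slotResult.data?.nextAvailableSlot,
          displayName,
          fieldType,
        }),
      })
      if (!res.ok) throw new Error('Failed to create tag definition')
      return res.json()
    },
    onSuccess: (_data, { knowledgeBaseId }) => {
      // Assumption: cache invalidation moved from the components into the hook,
      // which would explain why the call sites above no longer touch the query client.
      queryClient.invalidateQueries({ queryKey: ['knowledge', 'detail', knowledgeBaseId] })
    },
  })
}
```

With that shape, components only deal with `mutateAsync` and `isPending`, which matches the `createTagMutation.isPending ? 'Creating...' : 'Create Tag'` button state in the hunk above.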
diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx index 750dc0f78c..0d8140ed03 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx @@ -3,7 +3,6 @@ import { useEffect, useRef, useState } from 'react' import { zodResolver } from '@hookform/resolvers/zod' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { Loader2, RotateCcw, X } from 'lucide-react' import { useParams } from 'next/navigation' import { useForm } from 'react-hook-form' @@ -23,7 +22,7 @@ import { cn } from '@/lib/core/utils/cn' import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils' import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation' import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload' -import { knowledgeKeys } from '@/hooks/queries/knowledge' +import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/knowledge' const logger = createLogger('CreateBaseModal') @@ -82,10 +81,11 @@ interface SubmitStatus { export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { const params = useParams() const workspaceId = params.workspaceId as string - const queryClient = useQueryClient() + + const createKnowledgeBaseMutation = useCreateKnowledgeBase(workspaceId) + const deleteKnowledgeBaseMutation = useDeleteKnowledgeBase(workspaceId) const fileInputRef = useRef(null) - const [isSubmitting, setIsSubmitting] = useState(false) const [submitStatus, setSubmitStatus] = useState(null) const [files, setFiles] = useState([]) const [fileError, setFileError] = useState(null) @@ -245,12 +245,14 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { }) } + const isSubmitting = + createKnowledgeBaseMutation.isPending || deleteKnowledgeBaseMutation.isPending || isUploading + const onSubmit = async (data: FormValues) => { - setIsSubmitting(true) setSubmitStatus(null) try { - const knowledgeBasePayload = { + const newKnowledgeBase = await createKnowledgeBaseMutation.mutateAsync({ name: data.name, description: data.description || undefined, workspaceId: workspaceId, @@ -259,29 +261,8 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { minSize: data.minChunkSize, overlap: data.overlapSize, }, - } - - const response = await fetch('/api/knowledge', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(knowledgeBasePayload), }) - if (!response.ok) { - const errorData = await response.json() - throw new Error(errorData.error || 'Failed to create knowledge base') - } - - const result = await response.json() - - if (!result.success) { - throw new Error(result.error || 'Failed to create knowledge base') - } - - const newKnowledgeBase = result.data - if (files.length > 0) { try { const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, { @@ -293,15 +274,11 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { logger.info(`Successfully uploaded ${uploadedFiles.length} files`) logger.info(`Started processing ${uploadedFiles.length} documents in the background`) - - await queryClient.invalidateQueries({ - queryKey: 
knowledgeKeys.list(workspaceId), - }) } catch (uploadError) { logger.error('File upload failed, deleting knowledge base:', uploadError) try { - await fetch(`/api/knowledge/${newKnowledgeBase.id}`, { - method: 'DELETE', + await deleteKnowledgeBaseMutation.mutateAsync({ + knowledgeBaseId: newKnowledgeBase.id, }) logger.info(`Deleted orphaned knowledge base: ${newKnowledgeBase.id}`) } catch (deleteError) { @@ -309,10 +286,6 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { } throw uploadError } - } else { - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.list(workspaceId), - }) } files.forEach((file) => URL.revokeObjectURL(file.preview)) @@ -325,8 +298,6 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) { type: 'error', message: error instanceof Error ? error.message : 'An unknown error occurred', }) - } finally { - setIsSubmitting(false) } } diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/components/knowledge-header/knowledge-header.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/components/knowledge-header/knowledge-header.tsx index 1c68744493..4ae936af73 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/components/knowledge-header/knowledge-header.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/components/knowledge-header/knowledge-header.tsx @@ -2,7 +2,6 @@ import { useEffect, useState } from 'react' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { AlertTriangle, ChevronDown, LibraryBig, MoreHorizontal } from 'lucide-react' import Link from 'next/link' import { @@ -15,7 +14,7 @@ import { } from '@/components/emcn' import { Trash } from '@/components/emcn/icons/trash' import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants' -import { knowledgeKeys } from '@/hooks/queries/knowledge' +import { useUpdateKnowledgeBase } from '@/hooks/queries/knowledge' const logger = createLogger('KnowledgeHeader') @@ -54,14 +53,13 @@ interface Workspace { } export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) { - const queryClient = useQueryClient() const [isActionsPopoverOpen, setIsActionsPopoverOpen] = useState(false) const [isWorkspacePopoverOpen, setIsWorkspacePopoverOpen] = useState(false) const [workspaces, setWorkspaces] = useState([]) const [isLoadingWorkspaces, setIsLoadingWorkspaces] = useState(false) - const [isUpdatingWorkspace, setIsUpdatingWorkspace] = useState(false) - // Fetch available workspaces + const updateKnowledgeBase = useUpdateKnowledgeBase() + useEffect(() => { if (!options?.knowledgeBaseId) return @@ -76,7 +74,6 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) const data = await response.json() - // Filter workspaces where user has write/admin permissions const availableWorkspaces = data.workspaces .filter((ws: any) => ws.permissions === 'write' || ws.permissions === 'admin') .map((ws: any) => ({ @@ -97,47 +94,27 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) }, [options?.knowledgeBaseId]) const handleWorkspaceChange = async (workspaceId: string | null) => { - if (isUpdatingWorkspace || !options?.knowledgeBaseId) return - - try { - setIsUpdatingWorkspace(true) - setIsWorkspacePopoverOpen(false) - - const response = await fetch(`/api/knowledge/${options.knowledgeBaseId}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', + if (updateKnowledgeBase.isPending 
|| !options?.knowledgeBaseId) return + + setIsWorkspacePopoverOpen(false) + + updateKnowledgeBase.mutate( + { + knowledgeBaseId: options.knowledgeBaseId, + updates: { workspaceId }, + }, + { + onSuccess: () => { + logger.info( + `Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}` + ) + options.onWorkspaceChange?.(workspaceId) + }, + onError: (err) => { + logger.error('Error updating workspace:', err) }, - body: JSON.stringify({ - workspaceId, - }), - }) - - if (!response.ok) { - const result = await response.json() - throw new Error(result.error || 'Failed to update workspace') - } - - const result = await response.json() - - if (result.success) { - logger.info( - `Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}` - ) - - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(options.knowledgeBaseId), - }) - - await options.onWorkspaceChange?.(workspaceId) - } else { - throw new Error(result.error || 'Failed to update workspace') } - } catch (err) { - logger.error('Error updating workspace:', err) - } finally { - setIsUpdatingWorkspace(false) - } + ) } const currentWorkspace = workspaces.find((ws) => ws.id === options?.currentWorkspaceId) @@ -147,7 +124,6 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
{breadcrumbs.map((breadcrumb, index) => { - // Use unique identifier when available, fallback to content-based key const key = breadcrumb.id || `${breadcrumb.label}-${breadcrumb.href || index}` return ( @@ -189,13 +165,13 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) + + {!isRestoreVariant && onContinue && ( + + )} +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/checkpoint-confirmation/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/checkpoint-confirmation/index.ts new file mode 100644 index 0000000000..612120a4f7 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/checkpoint-confirmation/index.ts @@ -0,0 +1 @@ +export * from './checkpoint-confirmation' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display/file-display.tsx similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display.tsx rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display/file-display.tsx diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display/index.ts new file mode 100644 index 0000000000..feaf05e59e --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/file-display/index.ts @@ -0,0 +1 @@ +export * from './file-display' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/index.ts index 75eb971876..96b6244e92 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/index.ts @@ -1,5 +1,6 @@ +export * from './checkpoint-confirmation' export * from './file-display' -export { default as CopilotMarkdownRenderer } from './markdown-renderer' +export { CopilotMarkdownRenderer } from './markdown-renderer' export * from './smooth-streaming' export * from './thinking-block' export * from './usage-limit-actions' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer/index.ts new file mode 100644 index 0000000000..62e0a916cd --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer/index.ts @@ -0,0 +1 @@ +export { default as CopilotMarkdownRenderer } from './markdown-renderer' diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer/markdown-renderer.tsx similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer.tsx rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer/markdown-renderer.tsx diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/index.ts new file mode 100644 index 0000000000..96c0d8364f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/index.ts @@ -0,0 +1 @@ +export * from './smooth-streaming' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/smooth-streaming.tsx similarity index 75% rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming.tsx rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/smooth-streaming.tsx index 62a9ae6ba6..c0965808e8 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming/smooth-streaming.tsx @@ -1,27 +1,17 @@ import { memo, useEffect, useRef, useState } from 'react' import { cn } from '@/lib/core/utils/cn' -import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer' +import { CopilotMarkdownRenderer } from '../markdown-renderer' -/** - * Character animation delay in milliseconds - */ +/** Character animation delay in milliseconds */ const CHARACTER_DELAY = 3 -/** - * Props for the StreamingIndicator component - */ +/** Props for the StreamingIndicator component */ interface StreamingIndicatorProps { /** Optional class name for layout adjustments */ className?: string } -/** - * StreamingIndicator shows animated dots during message streaming - * Used as a standalone indicator when no content has arrived yet - * - * @param props - Component props - * @returns Animated loading indicator - */ +/** Shows animated dots during message streaming when no content has arrived */ export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps) => (
@@ -34,9 +24,7 @@ export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps) StreamingIndicator.displayName = 'StreamingIndicator' -/** - * Props for the SmoothStreamingText component - */ +/** Props for the SmoothStreamingText component */ interface SmoothStreamingTextProps { /** Content to display with streaming animation */ content: string @@ -44,20 +32,12 @@ interface SmoothStreamingTextProps { isStreaming: boolean } -/** - * SmoothStreamingText component displays text with character-by-character animation - * Creates a smooth streaming effect for AI responses - * - * @param props - Component props - * @returns Streaming text with smooth animation - */ +/** Displays text with character-by-character animation for smooth streaming */ export const SmoothStreamingText = memo( ({ content, isStreaming }: SmoothStreamingTextProps) => { - // Initialize with full content when not streaming to avoid flash on page load const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content)) const contentRef = useRef(content) const timeoutRef = useRef(null) - // Initialize index based on streaming state const indexRef = useRef(isStreaming ? 0 : content.length) const isAnimatingRef = useRef(false) @@ -95,7 +75,6 @@ export const SmoothStreamingText = memo( } } } else { - // Streaming ended - show full content immediately if (timeoutRef.current) { clearTimeout(timeoutRef.current) } @@ -119,7 +98,6 @@ export const SmoothStreamingText = memo( ) }, (prevProps, nextProps) => { - // Prevent re-renders during streaming unless content actually changed return ( prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/index.ts new file mode 100644 index 0000000000..515f72bb04 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/index.ts @@ -0,0 +1 @@ +export * from './thinking-block' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/thinking-block.tsx similarity index 77% rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block.tsx rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/thinking-block.tsx index 2b5b023362..de632ca5f4 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block/thinking-block.tsx @@ -3,66 +3,45 @@ import { memo, useEffect, useMemo, useRef, useState } from 'react' import clsx from 'clsx' import { ChevronUp } from 'lucide-react' -import CopilotMarkdownRenderer from './markdown-renderer' +import { CopilotMarkdownRenderer } 
from '../markdown-renderer' -/** - * Removes thinking tags (raw or escaped) from streamed content. - */ +/** Removes thinking tags (raw or escaped) and special tags from streamed content */ function stripThinkingTags(text: string): string { return text .replace(/<\/?thinking[^>]*>/gi, '') .replace(/<\/?thinking[^&]*>/gi, '') + .replace(/[\s\S]*?<\/options>/gi, '') + .replace(/[\s\S]*$/gi, '') + .replace(/[\s\S]*?<\/plan>/gi, '') + .replace(/[\s\S]*$/gi, '') .trim() } -/** - * Max height for thinking content before internal scrolling kicks in - */ -const THINKING_MAX_HEIGHT = 150 - -/** - * Height threshold before gradient fade kicks in - */ -const GRADIENT_THRESHOLD = 100 - -/** - * Interval for auto-scroll during streaming (ms) - */ +/** Interval for auto-scroll during streaming (ms) */ const SCROLL_INTERVAL = 50 -/** - * Timer update interval in milliseconds - */ +/** Timer update interval in milliseconds */ const TIMER_UPDATE_INTERVAL = 100 -/** - * Thinking text streaming - much faster than main text - * Essentially instant with minimal delay - */ +/** Thinking text streaming delay - faster than main text */ const THINKING_DELAY = 0.5 const THINKING_CHARS_PER_FRAME = 3 -/** - * Props for the SmoothThinkingText component - */ +/** Props for the SmoothThinkingText component */ interface SmoothThinkingTextProps { content: string isStreaming: boolean } /** - * SmoothThinkingText renders thinking content with fast streaming animation - * Uses gradient fade at top when content is tall enough + * Renders thinking content with fast streaming animation. */ const SmoothThinkingText = memo( ({ content, isStreaming }: SmoothThinkingTextProps) => { - // Initialize with full content when not streaming to avoid flash on page load const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content)) - const [showGradient, setShowGradient] = useState(false) const contentRef = useRef(content) const textRef = useRef(null) const rafRef = useRef(null) - // Initialize index based on streaming state const indexRef = useRef(isStreaming ? 0 : content.length) const lastFrameTimeRef = useRef(0) const isAnimatingRef = useRef(false) @@ -88,7 +67,6 @@ const SmoothThinkingText = memo( if (elapsed >= THINKING_DELAY) { if (currentIndex < currentContent.length) { - // Reveal multiple characters per frame for faster streaming const newIndex = Math.min( currentIndex + THINKING_CHARS_PER_FRAME, currentContent.length @@ -110,7 +88,6 @@ const SmoothThinkingText = memo( rafRef.current = requestAnimationFrame(animateText) } } else { - // Streaming ended - show full content immediately if (rafRef.current) { cancelAnimationFrame(rafRef.current) } @@ -127,30 +104,10 @@ const SmoothThinkingText = memo( } }, [content, isStreaming]) - // Check if content height exceeds threshold for gradient - useEffect(() => { - if (textRef.current && isStreaming) { - const height = textRef.current.scrollHeight - setShowGradient(height > GRADIENT_THRESHOLD) - } else { - setShowGradient(false) - } - }, [displayedContent, isStreaming]) - - // Apply vertical gradient fade at the top only when content is tall enough - const gradientStyle = - isStreaming && showGradient - ? { - maskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)', - WebkitMaskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)', - } - : undefined - return (
@@ -165,9 +122,7 @@ const SmoothThinkingText = memo( SmoothThinkingText.displayName = 'SmoothThinkingText' -/** - * Props for the ThinkingBlock component - */ +/** Props for the ThinkingBlock component */ interface ThinkingBlockProps { /** Content of the thinking block */ content: string @@ -182,13 +137,8 @@ interface ThinkingBlockProps { } /** - * ThinkingBlock component displays AI reasoning/thinking process - * Shows collapsible content with duration timer - * Auto-expands during streaming and collapses when complete - * Auto-collapses when a tool call or other content comes in after it - * - * @param props - Component props - * @returns Thinking block with expandable content and timer + * Displays AI reasoning/thinking process with collapsible content and duration timer. + * Auto-expands during streaming and collapses when complete. */ export function ThinkingBlock({ content, @@ -197,7 +147,6 @@ export function ThinkingBlock({ label = 'Thought', hasSpecialTags = false, }: ThinkingBlockProps) { - // Strip thinking tags from content on render to handle persisted messages const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content]) const [isExpanded, setIsExpanded] = useState(false) @@ -209,12 +158,8 @@ export function ThinkingBlock({ const lastScrollTopRef = useRef(0) const programmaticScrollRef = useRef(false) - /** - * Auto-expands block when streaming with content - * Auto-collapses when streaming ends OR when following content arrives - */ + /** Auto-expands during streaming, auto-collapses when streaming ends or following content arrives */ useEffect(() => { - // Collapse if streaming ended, there's following content, or special tags arrived if (!isStreaming || hasFollowingContent || hasSpecialTags) { setIsExpanded(false) userCollapsedRef.current = false @@ -227,7 +172,6 @@ export function ThinkingBlock({ } }, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags]) - // Reset start time when streaming begins useEffect(() => { if (isStreaming && !hasFollowingContent) { startTimeRef.current = Date.now() @@ -236,9 +180,7 @@ export function ThinkingBlock({ } }, [isStreaming, hasFollowingContent]) - // Update duration timer during streaming (stop when following content arrives) useEffect(() => { - // Stop timer if not streaming or if there's following content (thinking is done) if (!isStreaming || hasFollowingContent) return const interval = setInterval(() => { @@ -248,7 +190,6 @@ export function ThinkingBlock({ return () => clearInterval(interval) }, [isStreaming, hasFollowingContent]) - // Handle scroll events to detect user scrolling away useEffect(() => { const container = scrollContainerRef.current if (!container || !isExpanded) return @@ -267,7 +208,6 @@ export function ThinkingBlock({ setUserHasScrolledAway(true) } - // Re-stick if user scrolls back to bottom with intent if (userHasScrolledAway && isNearBottom && delta > 10) { setUserHasScrolledAway(false) } @@ -281,7 +221,6 @@ export function ThinkingBlock({ return () => container.removeEventListener('scroll', handleScroll) }, [isExpanded, userHasScrolledAway]) - // Smart auto-scroll: always scroll to bottom while streaming unless user scrolled away useEffect(() => { if (!isStreaming || !isExpanded || userHasScrolledAway) return @@ -302,20 +241,16 @@ export function ThinkingBlock({ return () => window.clearInterval(intervalId) }, [isStreaming, isExpanded, userHasScrolledAway]) - /** - * Formats duration in milliseconds to seconds - * Always shows seconds, rounded to nearest whole second, minimum 1s - 
*/ + /** Formats duration in milliseconds to seconds (minimum 1s) */ const formatDuration = (ms: number) => { const seconds = Math.max(1, Math.round(ms / 1000)) return `${seconds}s` } const hasContent = cleanContent.length > 0 - // Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags const durationText = `${label} for ${formatDuration(duration)}` - // Convert past tense label to present tense for streaming (e.g., "Thought" → "Thinking") + const getStreamingLabel = (lbl: string) => { if (lbl === 'Thought') return 'Thinking' if (lbl.endsWith('ed')) return `${lbl.slice(0, -2)}ing` @@ -323,11 +258,9 @@ export function ThinkingBlock({ } const streamingLabel = getStreamingLabel(label) - // During streaming: show header with shimmer effect + expanded content if (!isThinkingDone) { return (
- {/* Define shimmer keyframes */}